:zap: [Enhance] ChatCompletionsRequester: Set default temperature to 0.5 and default top_p to 0.95
Browse files
components/buttons_binder.js
CHANGED
@@ -99,7 +99,9 @@ class SendUserInputButtonBinder {
|
|
99 |
if (get_selected_llm_model() == "notes") {
|
100 |
create_messager("user", user_input_content);
|
101 |
} else {
|
102 |
-
this.requester = new ChatCompletionsRequester(
|
|
|
|
|
103 |
this.requester.create_messager_components();
|
104 |
start_latest_message_animation();
|
105 |
let requester_post = this.requester.post();
|
|
|
99 |
if (get_selected_llm_model() == "notes") {
|
100 |
create_messager("user", user_input_content);
|
101 |
} else {
|
102 |
+
this.requester = new ChatCompletionsRequester({
|
103 |
+
prompt: user_input_content,
|
104 |
+
});
|
105 |
this.requester.create_messager_components();
|
106 |
start_latest_message_animation();
|
107 |
let requester_post = this.requester.post();
|
networks/llm_requester.js
CHANGED
@@ -11,18 +11,19 @@ import {
|
|
11 |
} from "../components/chat_operator.js";
|
12 |
|
13 |
export class ChatCompletionsRequester {
|
14 |
-
constructor(
|
15 |
prompt,
|
16 |
model = null,
|
17 |
-
temperature =
|
18 |
-
|
19 |
-
|
|
|
20 |
this.prompt = prompt;
|
21 |
this.openai_endpoint =
|
22 |
openai_endpoint || this.extract_endpoint_and_model()[0];
|
23 |
this.model = model || this.extract_endpoint_and_model()[1];
|
24 |
-
this.temperature =
|
25 |
-
|
26 |
this.backend_request_endpoint = "/chat/completions";
|
27 |
this.controller = new AbortController();
|
28 |
}
|
@@ -51,6 +52,7 @@ export class ChatCompletionsRequester {
|
|
51 |
model: this.model,
|
52 |
messages: this.openai_request_messages,
|
53 |
temperature: this.temperature,
|
|
|
54 |
stream: true,
|
55 |
},
|
56 |
};
|
|
|
11 |
} from "../components/chat_operator.js";
|
12 |
|
13 |
export class ChatCompletionsRequester {
|
14 |
+
constructor({
|
15 |
prompt,
|
16 |
model = null,
|
17 |
+
temperature = 0.5,
|
18 |
+
top_p = 0.95,
|
19 |
+
openai_endpoint = null,
|
20 |
+
} = {}) {
|
21 |
this.prompt = prompt;
|
22 |
this.openai_endpoint =
|
23 |
openai_endpoint || this.extract_endpoint_and_model()[0];
|
24 |
this.model = model || this.extract_endpoint_and_model()[1];
|
25 |
+
this.temperature = temperature;
|
26 |
+
this.top_p = top_p;
|
27 |
this.backend_request_endpoint = "/chat/completions";
|
28 |
this.controller = new AbortController();
|
29 |
}
|
|
|
52 |
model: this.model,
|
53 |
messages: this.openai_request_messages,
|
54 |
temperature: this.temperature,
|
55 |
+
top_p: this.top_p,
|
56 |
stream: true,
|
57 |
},
|
58 |
};
|