:zap: [Enhance] Use selected temperature
Browse files
apps/llm_mixer/js/chat_operator.js
CHANGED
@@ -37,7 +37,7 @@ export function get_selected_llm_model() {
|
|
37 |
}
|
38 |
|
39 |
export function get_selected_temperature() {
|
40 |
-
return temperature_select.val();
|
41 |
}
|
42 |
|
43 |
export function get_latest_messager_container() {
|
|
|
37 |
}
|
38 |
|
39 |
/**
 * Read the temperature currently chosen in the UI control and
 * return it coerced to a number (the commit this diff belongs to
 * added the numeric coercion).
 * @returns {number} numeric value of `temperature_select.val()`
 */
export function get_selected_temperature() {
    const raw_value = temperature_select.val();
    return Number(raw_value);
}
|
42 |
|
43 |
export function get_latest_messager_container() {
|
apps/llm_mixer/js/llm_requester.js
CHANGED
@@ -8,6 +8,7 @@ import {
|
|
8 |
create_messager,
|
9 |
get_request_messages,
|
10 |
get_selected_llm_model,
|
|
|
11 |
} from "./chat_operator.js";
|
12 |
|
13 |
export class ChatCompletionsRequester {
|
@@ -20,7 +21,8 @@ export class ChatCompletionsRequester {
|
|
20 |
) {
|
21 |
this.prompt = prompt;
|
22 |
this.model = model || get_selected_llm_model() || "gpt-turbo-3.5";
|
23 |
-
this.temperature =
|
|
|
24 |
this.endpoint = endpoint || secrets.openai_endpoint;
|
25 |
this.cors_proxy = cors_proxy || secrets.cors_proxy;
|
26 |
this.request_endpoint = this.cors_proxy + this.endpoint;
|
|
|
8 |
create_messager,
|
9 |
get_request_messages,
|
10 |
get_selected_llm_model,
|
11 |
+
get_selected_temperature,
|
12 |
} from "./chat_operator.js";
|
13 |
|
14 |
export class ChatCompletionsRequester {
|
|
|
21 |
) {
|
22 |
this.prompt = prompt;
|
23 |
this.model = model || get_selected_llm_model() || "gpt-turbo-3.5";
|
24 |
+
this.temperature =
|
25 |
+
temperature !== null ? temperature : get_selected_temperature();
|
26 |
this.endpoint = endpoint || secrets.openai_endpoint;
|
27 |
this.cors_proxy = cors_proxy || secrets.cors_proxy;
|
28 |
this.request_endpoint = this.cors_proxy + this.endpoint;
|