:gem: [Feature] Support chat with selected llm models
apps/llm_mixer/index.html
CHANGED
@@ -14,16 +14,18 @@
         <div class="container">
             <div id="messagers-container" class="mt-3"></div>
             <div class="fixed-bottom m-3">
-                <div class="container-fluid">
+                <div class="container-fluid mt-2">
+                    <div class="col-auto">
+                        <select
+                            class="form-select"
+                            id="available-models-select"
+                        >
+                            <option value="gpt-3.5-turbo">GPT-3.5</option>
+                        </select>
+                    </div>
+                </div>
+                <div class="container-fluid mt-2">
                     <div class="row no-gutters">
-                        <div class="col-auto">
-                            <select
-                                class="form-select"
-                                id="available-models-select"
-                            >
-                                <option value="gpt-3.5-turbo">GPT-3.5</option>
-                            </select>
-                        </div>
                         <div class="col">
                             <textarea
                                 id="user-input"

apps/llm_mixer/js/chat_operator.js
CHANGED
@@ -3,6 +3,8 @@ import { Messager, MessagerList } from "./messager.js";
 let messagers_container = $("#messagers-container");
 let messager_list = new MessagerList(messagers_container);
 
+let available_models_select = $("#available-models-select");
+
 export function create_messager(
     role,
     content = "",
@@ -19,6 +21,10 @@ export function create_messager(
     messager_list.push(messager);
 }
 
+export function get_selected_llm_model() {
+    return available_models_select.val();
+}
+
 export function get_latest_message_viewer() {
     return messagers_container.children().last();
 }
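
The new get_selected_llm_model export gives other modules a single accessor for the dropdown's current value instead of querying the DOM themselves. A minimal usage sketch, assuming the script runs after the #available-models-select element exists (the caller below is hypothetical, not part of this commit):

    // Hypothetical caller; get_selected_llm_model is the real export above.
    import { get_selected_llm_model } from "./chat_operator.js";

    // jQuery's .val() returns the value of the selected <option>,
    // e.g. "gpt-3.5-turbo" for the default option in index.html.
    console.log(`Selected model: ${get_selected_llm_model()}`);

Because available_models_select is captured at module load time, chat_operator.js must be imported after the DOM is ready; otherwise the jQuery lookup matches nothing and .val() returns undefined.
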
apps/llm_mixer/js/default.css
CHANGED
@@ -31,5 +31,5 @@
 }
 
 #available-models-select {
-    max-width:
+    max-width: 250px;
 }
apps/llm_mixer/js/llm_models_loader.js
CHANGED
@@ -9,10 +9,11 @@ export async function setup_available_models_on_select(default_option = null) {
     }
 
     available_models.forEach((value, index) => {
-        const option = new Option(value,
+        const option = new Option(value, value);
         select.append(option);
         if (value === default_option) {
             $(option).prop("selected", true);
         }
     });
+    console.log(`Default model: ${select.val()}`);
 }
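
Passing value as both arguments to the Option constructor sets it as the option's display text and its value attribute, so select.val() yields the model name directly, which is what get_selected_llm_model relies on. A sketch of how the setup function might be invoked at startup, under the assumption that the call site lives elsewhere (it is not part of this diff):

    // Hypothetical startup wiring; the function is async per its declaration,
    // so this would run inside a module or an async initializer.
    import { setup_available_models_on_select } from "./llm_models_loader.js";

    // Populate the dropdown and pre-select a default model.
    await setup_available_models_on_select("gpt-3.5-turbo");

The console.log added at the end of the function then reports which option ended up selected after population.
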
apps/llm_mixer/js/llm_requester.js
CHANGED
@@ -7,21 +7,20 @@ import {
     update_message,
     create_messager,
     get_request_messages,
+    get_selected_llm_model,
 } from "./chat_operator.js";
 
 export class ChatCompletionsRequester {
     constructor(
         prompt,
-        model =
-        temperature =
-        messages =
-        endpoint =
-        cors_proxy
+        model = null,
+        temperature = null,
+        endpoint = null,
+        cors_proxy = null
     ) {
         this.prompt = prompt;
-        this.model = model;
-        this.temperature = temperature;
-        this.messages = messages;
+        this.model = model || get_selected_llm_model() || "gpt-turbo-3.5";
+        this.temperature = temperature !== null ? temperature : 0;
         this.endpoint = endpoint || secrets.openai_endpoint;
         this.cors_proxy = cors_proxy || secrets.cors_proxy;
         this.request_endpoint = this.cors_proxy + this.endpoint;
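
The constructor now resolves its model in a fixed order: an explicit argument wins, then the dropdown selection via get_selected_llm_model(), then the hard-coded literal "gpt-turbo-3.5" (note this literal is reversed relative to the "gpt-3.5-turbo" identifier used elsewhere in the commit). The temperature check uses a strict null comparison so that an explicit 0 is kept rather than replaced by the default. An illustrative sketch of the resolution order (the callers and values below are hypothetical):

    // Model falls back to the dropdown selection when not passed explicitly.
    const requester = new ChatCompletionsRequester("Hello!");
    // requester.model === get_selected_llm_model(), or the hard-coded
    // fallback if the dropdown yields no value.

    // An explicit argument bypasses the dropdown entirely, and the
    // strict null check preserves a falsy temperature of 0.
    const pinned = new ChatCompletionsRequester("Hello!", "gpt-4", 0);
    // pinned.model === "gpt-4"; pinned.temperature === 0.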