import {
  jsonize_stream_data,
  stringify_stream_bytes,
} from "./stream_jsonizer.js";
import * as secrets from "./secrets.js";
import {
  update_message,
  create_messager,
  get_request_messages,
  get_selected_llm_model,
  get_selected_temperature,
} from "./chat_operator.js";
export class ChatCompletionsRequester {
  constructor(
    prompt,
    model = null,
    temperature = null,
    endpoint = null,
    cors_proxy = null
  ) {
    this.prompt = prompt;
    // Fall back to the model selected in the UI, then to a default model.
    this.model = model || get_selected_llm_model() || "gpt-3.5-turbo";
    this.temperature =
      temperature !== null ? temperature : get_selected_temperature();
    this.endpoint = endpoint || secrets.openai_endpoint;
    this.cors_proxy = cors_proxy || secrets.cors_proxy;
    // Requests are routed through the CORS proxy prepended to the endpoint.
    this.request_endpoint = this.cors_proxy + this.endpoint;
    this.controller = new AbortController();
  }
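  // Snapshot the current conversation from the chat UI as request messages.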
  construct_request_messages() {
    this.request_messages = get_request_messages();
  }
  construct_request_headers() {
    this.request_headers = {
      "Content-Type": "application/json",
      Authorization: `Bearer ${secrets.openai_api_key}`,
    };
  }
  construct_request_body() {
    this.construct_request_messages();
    this.request_body = {
      model: this.model,
      messages: this.request_messages,
      temperature: this.temperature,
      stream: true,
    };
  }
  construct_request_params() {
    this.construct_request_headers();
    this.construct_request_body();
    this.request_params = {
      method: "POST",
      headers: this.request_headers,
      body: JSON.stringify(this.request_body),
      signal: this.controller.signal,
    };
  }
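  // Render the user's message plus an empty assistant message that the
  // streamed response will fill in.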
  create_messager_components() {
    create_messager("user", this.prompt);
    create_messager("assistant", "", this.model, this.temperature);
  }
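  // POST the request and consume the response body as a stream, updating the
  // assistant message chunk by chunk until the stream is done.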
  post() {
    this.construct_request_params();
    return fetch(this.request_endpoint, this.request_params)
      .then((response) => response.body)
      .then((rb) => {
        const reader = rb.getReader();
        // Recursively read the stream: decode each chunk of bytes into JSON
        // deltas and append them to the assistant message.
        return reader.read().then(function process({ done, value }) {
          if (done) {
            return;
          }
          let json_chunks = jsonize_stream_data(
            stringify_stream_bytes(value)
          );
          update_message(json_chunks);
          return reader.read().then(process);
        });
      })
      .catch((error) => console.error("Error:", error));
  }
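  // Abort the in-flight streaming request.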
  stop() {
    this.controller.abort();
  }
}
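
// Usage sketch (illustrative only; assumes the chat UI helpers imported above
// are wired up on the page):
//   const requester = new ChatCompletionsRequester("Hello!");
//   requester.create_messager_components();
//   requester.post();
//   // ... later, to cancel the stream:
//   requester.stop();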
export var available_models = [];
export function request_available_models() {
  var url = "https://magic-api.ninomae.live/v1/models";
  let request_options = {
    method: "GET",
  };
  return fetch(url, request_options)
    .then((response) => response.json())
    .then((response_json) => {
      // Collect model ids from the /v1/models response into the shared list.
      response_json.data.forEach((item) => {
        available_models.push(item.id);
      });
      available_models.sort();
      console.log(available_models);
    })
    .catch((error) => {
      console.error("Error:", error);
    });
}
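
// Example (illustrative): wait for the model list before reading it.
//   request_available_models().then(() => {
//     console.log(`${available_models.length} models available`);
//   });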