import type { Conversation } from "$lib/types/Conversation";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import { endpointTgi, endpointTgiParametersSchema } from "./tgi/endpointTgi";
import { z } from "zod";
import endpointAws, { endpointAwsParametersSchema } from "./aws/endpointAws";
import { endpointOAIParametersSchema, endpointOai } from "./openai/endpointOai";
import endpointLlamacpp, { endpointLlamacppParametersSchema } from "./llamacpp/endpointLlamacpp";
import endpointOllama, { endpointOllamaParametersSchema } from "./ollama/endpointOllama";

// Parameters passed to an endpoint when generating text for a conversation
interface EndpointParameters {
	conversation: {
		messages: Omit<Conversation["messages"][0], "id">[];
		preprompt?: Conversation["preprompt"];
		_id?: Conversation["_id"];
	};
}

// Configuration fields shared by every endpoint type
interface CommonEndpoint {
	weight: number;
}

// An endpoint takes the conversation parameters and returns a text generation stream
export type Endpoint = (
	params: EndpointParameters
) => Promise<AsyncGenerator<TextGenerationStreamOutput, void, void>>;

// An endpoint generator takes endpoint-specific parameters and builds an Endpoint
export type EndpointGenerator<T extends CommonEndpoint> = (parameters: T) => Endpoint;
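
// A minimal sketch of what an endpoint generator could look like. The names
// `echoParametersSchema` and `endpointEcho` are hypothetical and only illustrate
// the EndpointGenerator contract; the stream output fields follow the
// TextGenerationStreamOutput shape assumed here. The real generators live in the
// modules imported above.
//
// const echoParametersSchema = z.object({
// 	weight: z.number().int().positive().default(1),
// 	type: z.literal("echo"),
// });
//
// function endpointEcho(parameters: z.infer<typeof echoParametersSchema>): Endpoint {
// 	return async ({ conversation }) =>
// 		(async function* () {
// 			// echo the content of the last message back as a single streamed token
// 			const text = conversation.messages.at(-1)?.content ?? "";
// 			yield {
// 				token: { id: 0, text, logprob: 0, special: false },
// 				generated_text: text,
// 				details: null,
// 			};
// 		})();
// }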

// Registry of all endpoint generators, keyed by endpoint type
export const endpoints = {
	tgi: endpointTgi,
	aws: endpointAws,
	openai: endpointOai,
	llamacpp: endpointLlamacpp,
	ollama: endpointOllama,
};

// Discriminated union on the "type" field, used to validate endpoint configurations
export const endpointSchema = z.discriminatedUnion("type", [
	endpointAwsParametersSchema,
	endpointOAIParametersSchema,
	endpointTgiParametersSchema,
	endpointLlamacppParametersSchema,
	endpointOllamaParametersSchema,
]);

export default endpoints;
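
// A minimal usage sketch: validate a raw config object, then build and drive an
// endpoint. `rawEndpointConfig` is a hypothetical value, and the example assumes
// the "tgi" generator accepts the parameters inferred from its schema.
//
// const config = endpointSchema.parse(rawEndpointConfig);
// if (config.type === "tgi") {
// 	const endpoint = endpoints.tgi(config);
// 	const stream = await endpoint({ conversation: { messages: [], preprompt: "" } });
// 	for await (const output of stream) {
// 		process.stdout.write(output.token.text);
// 	}
// }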