import { Message } from '@/types/chat'; import { OpenAIModel } from '@/types/openai'; import { AZURE_DEPLOYMENT_ID, OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from '../app/const'; import { ParsedEvent, ReconnectInterval, createParser, } from 'eventsource-parser'; export class OpenAIError extends Error { type: string; param: string; code: string; constructor(message: string, type: string, param: string, code: string) { super(message); this.name = 'OpenAIError'; this.type = type; this.param = param; this.code = code; } } export const OpenAIStream = async ( model: OpenAIModel, systemPrompt: string, temperature : number, key: string, messages: Message[], ) => { let url = `${OPENAI_API_HOST}/v1/chat/completions`; const res = await fetch(url, { headers: { 'Content-Type': 'application/json' }, method: 'POST', body: JSON.stringify({ messages: [ { role: 'system', content: systemPrompt, }, ...messages, ], max_tokens: 1000, temperature: temperature, stream: true, }), }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); if (res.status !== 200) { const result = await res.json(); if (result.error) { throw new OpenAIError( result.error.message, result.error.type, result.error.param, result.error.code, ); } else { throw new Error( `API returned an error: ${ decoder.decode(result?.value) || result.statusText }`, ); } } const stream = new ReadableStream({ async start(controller) { const onParse = (event: ParsedEvent | ReconnectInterval) => { console.log(event); if (event.type === 'event') { const data = event.data; if (data === '<|end|>') { controller.close(); return; } try { const queue = encoder.encode(data); controller.enqueue(queue); } catch (e) { controller.error(e); } } }; const parser = createParser(onParse); for await (const chunk of res.body as any) { parser.feed(decoder.decode(chunk)); } }, }); return stream; };