matt HOFFNER committed
Commit b202e70
1 Parent(s): c790ee9
Files changed (2)
  1. pages/api/chat.ts +6 -1
  2. utils/server/index.ts +10 -22
pages/api/chat.ts CHANGED
@@ -15,7 +15,12 @@ export const config = {
 
 const handler = async (req: Request): Promise<Response> => {
   try {
-    const { model, messages, key, prompt, temperature } = (await req.json()) as ChatBody;
+    const chatBody = (await req.json()) as ChatBody;
+    let prompt = chatBody?.prompt;
+    let temperature = chatBody?.temperature;
+    let messages = chatBody?.messages;
+    let model = chatBody?.model;
+    let key = chatBody?.key;
 
     await init((imports) => WebAssembly.instantiate(wasm, imports));
     const encoding = new Tiktoken(
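
Note on this change: destructuring the parsed body throws a `TypeError` when `req.json()` resolves to `null` or `undefined`, whereas the optional-chained reads leave each field `undefined`. A minimal sketch of the difference; the `ChatBody` shape, helper name, and fallback values below are assumptions for illustration, not part of this commit.

```ts
// Hypothetical ChatBody shape, assumed here only so the sketch compiles.
interface ChatBody {
  model?: { id: string };
  messages?: { role: string; content: string }[];
  key?: string;
  prompt?: string;
  temperature?: number;
}

// Sketch of reading the body the way the new handler does, with assumed
// fallbacks for fields that may now come back as undefined.
async function readChatBody(req: Request) {
  const chatBody = (await req.json()) as ChatBody;

  // Previously: const { model, messages, key, prompt, temperature } = chatBody;
  // which throws if chatBody is null/undefined. Optional chaining does not.
  const prompt = chatBody?.prompt ?? '';          // assumed default
  const temperature = chatBody?.temperature ?? 1; // assumed default
  const messages = chatBody?.messages ?? [];      // assumed default
  const model = chatBody?.model;
  const key = chatBody?.key;

  return { model, messages, key, prompt, temperature };
}
```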
utils/server/index.ts CHANGED
@@ -31,25 +31,12 @@ export const OpenAIStream = async (
   messages: Message[],
 ) => {
   let url = `${OPENAI_API_HOST}/v1/chat/completions`;
-  if (OPENAI_API_TYPE === 'azure') {
-    url = `${OPENAI_API_HOST}/openai/deployments/${AZURE_DEPLOYMENT_ID}/chat/completions?api-version=${OPENAI_API_VERSION}`;
-  }
   const res = await fetch(url, {
     headers: {
-      'Content-Type': 'application/json',
-      ...(OPENAI_API_TYPE === 'openai' && {
-        Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
-      }),
-      ...(OPENAI_API_TYPE === 'azure' && {
-        'api-key': `${key ? key : process.env.OPENAI_API_KEY}`
-      }),
-      ...((OPENAI_API_TYPE === 'openai' && OPENAI_ORGANIZATION) && {
-        'OpenAI-Organization': OPENAI_ORGANIZATION,
-      }),
+      'Content-Type': 'application/json'
     },
     method: 'POST',
     body: JSON.stringify({
-      ...(OPENAI_API_TYPE === 'openai' && {model: model.id}),
       messages: [
         {
           role: 'system',
@@ -77,7 +64,7 @@ export const OpenAIStream = async (
       );
     } else {
       throw new Error(
-        `OpenAI API returned an error: ${
+        `API returned an error: ${
           decoder.decode(result?.value) || result.statusText
         }`,
       );
@@ -87,17 +74,18 @@ export const OpenAIStream = async (
   const stream = new ReadableStream({
     async start(controller) {
       const onParse = (event: ParsedEvent | ReconnectInterval) => {
+        console.log(event);
         if (event.type === 'event') {
           const data = event.data;
+          if (data === '<|end|>') {
+            controller.close();
+            return;
+          }
 
           try {
-            const json = JSON.parse(data);
-            if (json.choices[0].finish_reason != null) {
-              controller.close();
-              return;
-            }
-            const text = json.choices[0].delta.content;
-            const queue = encoder.encode(text);
+
+            const queue = encoder.encode(data);
+
             controller.enqueue(queue);
           } catch (e) {
             controller.error(e);
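
Note on this change: `onParse` now forwards each SSE `data` payload verbatim as UTF-8 bytes and closes the stream when it sees the literal `<|end|>` token, instead of parsing OpenAI-style JSON deltas. A minimal sketch of draining the resulting `ReadableStream` on the caller side; the helper name is an assumption, not part of this commit.

```ts
// Sketch only: drain the ReadableStream<Uint8Array> returned by OpenAIStream.
async function readStream(stream: ReadableStream<Uint8Array>): Promise<string> {
  const decoder = new TextDecoder();
  const reader = stream.getReader();
  let text = '';

  while (true) {
    const { value, done } = await reader.read();
    if (done) break; // the producer closes the stream once it sees '<|end|>'
    text += decoder.decode(value, { stream: true });
  }

  return text + decoder.decode(); // flush any buffered bytes
}
```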