matt HOFFNER committed on
Commit 82ddd60 · 1 Parent(s): 5859b36
src/app/search/web/page.jsx CHANGED
@@ -5,53 +5,57 @@ import { MemoizedReactMarkdown } from '../../../components/MemoizedReactMarkdown
 export default function WebSearchPage({ searchParams }) {
   const [aiResponse, setAiResponse] = useState("");
   const [searchTerm, setSearchTerm] = useState()
-  const [text, setText] = useState('');

   useEffect(() => {
     setSearchTerm(searchParams.searchTerm)
   }, [searchParams])

-
-
-  // Define the state for text
-
-
   useEffect(() => {
-    const handleSend = async () => {
-      const body = JSON.stringify({ question: searchTerm });
-      const controller = new AbortController();
+    const controller = new AbortController();
+    const signal = controller.signal;
+
+    async function fetchData() {
       const response = await fetch('/api/llm', {
         method: 'POST',
         headers: { 'Content-Type': 'application/json' },
-        signal: controller.signal,
-        body
+        body: JSON.stringify({ question: searchTerm || "Seattle activities this weekend" }),
+        signal,
       });
-
+
       if (!response.ok) {
-        return;
-      }
-
-      const reader = response.body.getReader();
-      const decoder = new TextDecoder();
-      let resultText = '';
-
-      while (true) {
-        const { value, done } = await reader.read();
-
-        if (done) {
-          break;
+        throw new Error(`HTTP error! status: ${response.status}`);
+      } else {
+        const reader = response.body.getReader();
+        const decoder = new TextDecoder();
+        let text = '';
+
+        try {
+          while (true) {
+            const { value, done } = await reader.read();
+
+            if (done) {
+              // When the stream ends, we can parse the complete text
+              const json = JSON.parse(text);
+              console.log(json);
+              setAiResponse(json);
+              console.log("Stream complete");
+              break;
+            }
+
+            text += decoder.decode(value, {stream: true});
+          }
+        } catch (error) {
+          console.error("Failed to parse JSON", error);
         }
-
-        const chunkValue = decoder.decode(value);
-        resultText += chunkValue;
       }
-
-      setText(resultText);
-    };
-
-    handleSend();
-  }, []);
-
+    }
+
+    fetchData().catch(error => {
+      console.error('Fetch failed: ', error);
+    });
+
+    return () => controller.abort();
+  }, [searchParams, searchTerm]);


   console.log(aiResponse);
@@ -105,7 +109,7 @@ export default function WebSearchPage({ searchParams }) {
           },
         }}
       >
-        {JSON.stringify(aiResponse)}
+        {aiResponse}
      </MemoizedReactMarkdown>
    </div>
  );
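For context, the rewritten effect combines three standard Web APIs: fetch with an AbortSignal, ReadableStream's getReader(), and TextDecoder in streaming mode, with the abort wired to the effect cleanup. The same pattern can be exercised outside React; the sketch below is illustrative only (the helper name readStreamedBody and the example question are not part of the commit), assuming the /api/llm route added in this repo:

// Minimal sketch of the fetch + getReader + AbortController pattern used in the effect above.
async function readStreamedBody(url, payload, signal) {
  const response = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
    signal,
  });
  if (!response.ok) {
    throw new Error(`HTTP error! status: ${response.status}`);
  }
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let text = '';
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    // { stream: true } keeps multi-byte characters split across chunks intact
    text += decoder.decode(value, { stream: true });
  }
  return text;
}

// Usage: the controller lets a caller (e.g. a React effect cleanup) cancel mid-stream.
const controller = new AbortController();
readStreamedBody('/api/llm', { question: 'Seattle activities this weekend' }, controller.signal)
  .then((raw) => console.log(JSON.parse(raw)))
  .catch((err) => console.error('Fetch failed:', err));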
src/pages/api/llm.js CHANGED
@@ -1,21 +1,24 @@
 import { GoogleCustomSearch } from "openai-function-calling-tools";
 import { LLMError, LLMStream } from './stream';

-const handler = async (req, res) => {
+const handler = async (req) => {
   try {
+    const { question } = (await req.body);
+
     const googleCustomSearch = new GoogleCustomSearch({
       apiKey: process.env.API_KEY,
-      googleCSEId: process.env.CONTEXT_KEY
+      googleCSEId: process.env.CONTEXT_KEY,
     });
+
     const messages = [
       {
         role: "user",
-        content: "What are some fun things to do in Seattle, WA this weekend?"
+        content: question,
       },
     ];

     const functions = {
-      googleCustomSearch
+      googleCustomSearch,
     };

     let promptToSend = "You are a helpful assistant";
@@ -24,11 +27,11 @@ const handler = async (req, res) => {
   } catch (error) {
     console.error(error);
     if (error instanceof LLMError) {
-      res.status(500).send({ error: error.message });
+      return new Response('Error', { status: 500, statusText: error.message });
     } else {
-      res.status(500).send({ error: 'An error occurred' });
+      return new Response('Error', { status: 500 });
     }
   }
 };

-export default handler;
+export default handler;
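With this change the error path returns a Web Response whose statusText carries the LLMError message, rather than sending a JSON { error } body through res, so callers should inspect response.ok and response.statusText instead of parsing an error object. A minimal client-side sketch under that assumption (the askLlm name and question text are illustrative, not part of the commit):

// Sketch of how a caller can surface the new error shape from /api/llm.
async function askLlm(question) {
  const res = await fetch('/api/llm', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ question }),
  });
  if (!res.ok) {
    // statusText holds error.message when the handler caught an LLMError
    throw new Error(`LLM request failed: ${res.status} ${res.statusText}`);
  }
  return res;
}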
src/pages/api/stream.js CHANGED
@@ -17,7 +17,8 @@ export const LLMStream = async (
   model,
   systemPrompt,
   temperature,
-  messages
+  messages,
+  functions
 ) => {
   let url = `${OPENAI_API_HOST}/v1/chat/completions`;
   const res = await fetch(url, {
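LLMStream's parameter list gains a trailing functions argument, so existing call sites need to pass the tool map as the fifth argument. A hypothetical call site under that assumption; the model id and temperature are placeholders, and the tool construction mirrors llm.js rather than anything shown in this hunk:

// Hypothetical call site for the widened LLMStream signature.
import { GoogleCustomSearch } from "openai-function-calling-tools";
import { LLMStream } from './stream';

const googleCustomSearch = new GoogleCustomSearch({
  apiKey: process.env.API_KEY,
  googleCSEId: process.env.CONTEXT_KEY,
});

const messages = [
  { role: "user", content: "What are some fun things to do in Seattle, WA this weekend?" },
];

// Placeholder model id and temperature; argument order matches the signature above.
const result = await LLMStream(
  "gpt-3.5-turbo",
  "You are a helpful assistant",
  0.7,
  messages,
  { googleCustomSearch }
);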