matt HOFFNER committed on
Commit 328552e · 1 Parent(s): ac695ee

try fetch and post

Files changed (2):
  1. src/app/search/web/page.jsx +43 -19
  2. src/pages/api/llm.js +7 -3
src/app/search/web/page.jsx CHANGED
@@ -1,31 +1,55 @@
-"use client"
 import { useEffect, useState } from "react";
 
 export default function WebSearchPage({ searchParams }) {
-  console.log(searchParams);
-  const [aiResponse, setAiResponse] = useState(null);
-  const startIndex = searchParams.start || "1";
+  const [aiResponse, setAiResponse] = useState("");
 
   useEffect(() => {
+    if (!searchParams || !searchParams.searchTerm) return;
+
+    const { searchTerm, start = "1" } = searchParams;
     const url = new URL('/api/llm', window.location.origin);
-    url.searchParams.append('question', searchParams.searchTerm);
-    url.searchParams.append('startIndex', startIndex);
-
-    const openaiRes = new EventSource(url);
 
-    // Listen for AI responses and append to state
-    openaiRes.onmessage = function(event) {
-      setAiResponse(aiResponse => aiResponse + event.data);
-    };
-
-    // Close connection when component unmounts
-    return () => {
-      openaiRes.close();
-    };
-  }, [searchParams, startIndex, aiResponse]);
+    fetch(url, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json'
+      },
+      body: JSON.stringify({
+        question: searchTerm,
+        startIndex: start
+      })
+    })
+      .then(response => {
+        if (!response.ok) {
+          throw new Error("HTTP error " + response.status);
+        }
+        // Create a reader to stream the response body
+        const reader = response.body.getReader();
+
+        // Read and process the response body chunks
+        return new ReadableStream({
+          start(controller) {
+            function push() {
+              reader.read().then(({ done, value }) => {
+                if (done) {
+                  // Close the stream when done
+                  controller.close();
+                  return;
+                }
+                // Decode the response chunk and append it to the existing response
+                setAiResponse(prev => prev + new TextDecoder().decode(value));
+                // Get the next chunk
+                push();
+              });
+            }
+            push();
+          }
+        });
+      })
+      .catch(console.error);
+  }, [searchParams]);
 
   console.log(aiResponse);
 
-
   return <>{aiResponse ? JSON.stringify(aiResponse) : 'Loading...'}</>;
 }
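
Note: the change above drops the EventSource subscription and instead consumes a POST response through response.body.getReader(). For orientation only, the same streamed read can be written as an async loop. The sketch below is not part of this commit: the component name, the AbortController cleanup, and the single shared TextDecoder with { stream: true } are illustrative assumptions.

// Sketch only (not code from this commit): the same POST + streamed read,
// written as an async loop with cleanup on unmount.
"use client";
import { useEffect, useState } from "react";

export default function WebSearchPageSketch({ searchParams }) {
  const [aiResponse, setAiResponse] = useState("");

  useEffect(() => {
    if (!searchParams || !searchParams.searchTerm) return;

    const controller = new AbortController();
    const decoder = new TextDecoder();

    (async () => {
      try {
        const res = await fetch('/api/llm', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            question: searchParams.searchTerm,
            startIndex: searchParams.start || "1",
          }),
          signal: controller.signal,
        });
        if (!res.ok) throw new Error("HTTP error " + res.status);

        // Pull chunks off the response body until the server closes the stream.
        const reader = res.body.getReader();
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          setAiResponse(prev => prev + decoder.decode(value, { stream: true }));
        }
      } catch (err) {
        if (err.name !== "AbortError") console.error(err);
      }
    })();

    // Abort the in-flight request when the component unmounts or searchParams change.
    return () => controller.abort();
  }, [searchParams]);

  return <>{aiResponse || "Loading..."}</>;
}
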
src/pages/api/llm.js CHANGED
@@ -20,13 +20,17 @@ export default function handler(req, res) {
   const openai = new OpenAIApi(configuration);
 
   const messages = [
+    {
+      role: "system",
+      content: "You are a helpful assistant.",
+    },
     {
       role: "user",
       content: QUESTION,
     },
   ];
 
-  const { googleCustomSearch, googleCustomSearchSchema } = new GoogleCustomSearch({
+  const googleCustomSearch = new GoogleCustomSearch({
    apiKey: process.env.API_KEY,
    googleCSEId: process.env.CONTEXT_KEY,
   });
@@ -39,7 +43,7 @@ export default function handler(req, res) {
   const response = await openai.createChatCompletion({
     model: "gpt-3.5-turbo-0613",
     messages,
-    functions: [googleCustomSearchSchema],
+    functions: [googleCustomSearch.schema],
     temperature: 0,
   });
 
@@ -64,7 +68,7 @@ export default function handler(req, res) {
   const args = response.data.choices[0].message.function_call.arguments;
 
   const fn = functions[fnName];
-  const result = await fn(...Object.values(JSON.parse(args)));
+  const result = await fn.call(args);
 
   messages.push({
     role: "assistant",
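
Note: the API route advertises the Google Custom Search tool via functions: [googleCustomSearch.schema] and, when the model answers with a function_call, invokes the matching entry in the local functions map (the commit does this via fn.call(args), which may be the tool's own call method rather than Function.prototype.call). For orientation only, the sketch below shows a generic gpt-3.5-turbo-0613 function-calling round trip; it assumes the handler setup visible in the diff (the configured openai client, messages, the functions map, and the Next.js res object), and the JSON.parse of arguments, the role: "function" follow-up message, and the final res.send are illustrative assumptions, not the handler's actual code.

// Sketch only: a generic function-calling round trip with the openai v3 SDK.
// Names openai, messages, functions, googleCustomSearch mirror the diff;
// everything else is an illustrative assumption.
const first = await openai.createChatCompletion({
  model: "gpt-3.5-turbo-0613",
  messages,
  functions: [googleCustomSearch.schema],
  temperature: 0,
});

const message = first.data.choices[0].message;

if (message.function_call) {
  const fnName = message.function_call.name;
  // `arguments` arrives as a JSON-encoded string and must be parsed.
  const args = JSON.parse(message.function_call.arguments);

  // Look up and run the matching tool (assumed here to accept a plain object).
  const result = await functions[fnName](args);

  // Feed the tool output back so the model can write a grounded answer.
  messages.push(message);
  messages.push({
    role: "function",
    name: fnName,
    content: JSON.stringify(result),
  });

  const followUp = await openai.createChatCompletion({
    model: "gpt-3.5-turbo-0613",
    messages,
    temperature: 0,
  });

  res.status(200).send(followUp.data.choices[0].message.content);
} else {
  res.status(200).send(message.content);
}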