matt HOFFNER committed on
Commit
eef247f
·
1 Parent(s): 644a95f

SSE get request

Browse files
Files changed (2) hide show
  1. src/app/search/web/page.jsx +28 -39
  2. src/pages/api/llm.js +10 -8
src/app/search/web/page.jsx CHANGED
@@ -1,55 +1,44 @@
1
- "use client"
2
  import { useEffect, useState } from "react";
3
 
4
  export default function WebSearchPage({ searchParams }) {
5
  const [aiResponse, setAiResponse] = useState("");
 
6
 
7
  useEffect(() => {
8
  if (!searchParams || !searchParams.searchTerm) return;
9
 
10
  const { searchTerm, start = "1" } = searchParams;
11
  const url = new URL('/api/llm', window.location.origin);
 
12
 
13
- fetch(url, {
14
- method: 'POST',
15
- headers: {
16
- 'Content-Type': 'application/json'
17
- },
18
- body: JSON.stringify({
19
- question: searchTerm,
20
- startIndex: start
21
- })
22
- })
23
- .then(response => {
24
- if (!response.ok) {
25
- throw new Error("HTTP error " + response.status);
26
- }
27
- // Create a reader to stream the response body
28
- const reader = response.body.getReader();
29
-
30
- // Read and process the response body chunks
31
- return new ReadableStream({
32
- start(controller) {
33
- function push() {
34
- reader.read().then(({ done, value }) => {
35
- if (done) {
36
- // Close the stream when done
37
- controller.close();
38
- return;
39
- }
40
- // Decode the response chunk and append it to the existing response
41
- setAiResponse(prev => prev + new TextDecoder().decode(value));
42
- // Get the next chunk
43
- push();
44
- });
45
- }
46
- push();
47
- }
48
- });
49
- })
50
- .catch(console.error);
51
  }, [searchParams]);
52
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  console.log(aiResponse);
54
 
55
  return <>{aiResponse}</>;
 
 
1
  import { useEffect, useState } from "react";
2
 
3
  export default function WebSearchPage({ searchParams }) {
4
  const [aiResponse, setAiResponse] = useState("");
5
+ const [eventSource, setEventSource] = useState(null);
6
 
7
  useEffect(() => {
8
  if (!searchParams || !searchParams.searchTerm) return;
9
 
10
  const { searchTerm, start = "1" } = searchParams;
11
  const url = new URL('/api/llm', window.location.origin);
12
+ url.search = new URLSearchParams({ question: searchTerm, startIndex: start });
13
 
14
+ // No need to make a fetch request. Directly open the EventSource connection.
15
+ const es = new EventSource(url);
16
+ setEventSource(es);
17
+
18
+ return () => {
19
+ if (es) es.close(); // Close the EventSource when the component is unmounted.
20
+ };
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  }, [searchParams]);
22
 
23
+ // Add event listener for the EventSource
24
+ useEffect(() => {
25
+ if (eventSource) {
26
+ eventSource.onmessage = (event) => {
27
+ setAiResponse(prev => prev + event.data);
28
+ };
29
+
30
+ eventSource.onerror = (event) => {
31
+ console.error("EventSource failed:", event);
32
+ };
33
+ }
34
+
35
+ return () => {
36
+ if (eventSource) {
37
+ eventSource.close();
38
+ }
39
+ };
40
+ }, [eventSource]);
41
+
42
  console.log(aiResponse);
43
 
44
  return <>{aiResponse}</>;
src/pages/api/llm.js CHANGED
@@ -2,10 +2,15 @@ import { Configuration, OpenAIApi } from "openai";
2
  import { GoogleCustomSearch } from "openai-function-calling-tools";
3
 
4
  export default function handler(req, res) {
 
 
 
 
 
5
  const QUESTION = req.body.question;
6
 
7
  if (!QUESTION) {
8
- res.status(400).send({ error: 'Question is missing in request' });
9
  return;
10
  }
11
 
@@ -15,10 +20,6 @@ export default function handler(req, res) {
15
  const openai = new OpenAIApi(configuration);
16
 
17
  const messages = [
18
- {
19
- role: "system",
20
- content: "You are a helpful assistant.",
21
- },
22
  {
23
  role: "user",
24
  content: QUESTION,
@@ -38,8 +39,9 @@ export default function handler(req, res) {
38
  const response = await openai.createChatCompletion({
39
  model: "gpt-3.5-turbo-0613",
40
  messages,
41
- functions: [googleCustomSearch.schema],
42
  temperature: 0,
 
43
  });
44
 
45
  return response;
@@ -63,7 +65,7 @@ export default function handler(req, res) {
63
  const args = response.data.choices[0].message.function_call.arguments;
64
 
65
  const fn = functions[fnName];
66
- const result = await fn.call(args);
67
 
68
  messages.push({
69
  role: "assistant",
@@ -87,4 +89,4 @@ export default function handler(req, res) {
87
  console.error(err);
88
  res.status(500).send({ error: 'Internal Server Error' });
89
  });
90
- }
 
2
  import { GoogleCustomSearch } from "openai-function-calling-tools";
3
 
4
  export default function handler(req, res) {
5
+ if (req.method !== 'POST') {
6
+ res.status(405).send({ error: 'Method Not Allowed', method: req.method });
7
+ return;
8
+ }
9
+
10
  const QUESTION = req.body.question;
11
 
12
  if (!QUESTION) {
13
+ res.status(400).send({ error: 'Question is missing in request body' });
14
  return;
15
  }
16
 
 
20
  const openai = new OpenAIApi(configuration);
21
 
22
  const messages = [
 
 
 
 
23
  {
24
  role: "user",
25
  content: QUESTION,
 
39
  const response = await openai.createChatCompletion({
40
  model: "gpt-3.5-turbo-0613",
41
  messages,
42
+ functions: [googleCustomSearchSchema],
43
  temperature: 0,
44
+ stream: true
45
  });
46
 
47
  return response;
 
65
  const args = response.data.choices[0].message.function_call.arguments;
66
 
67
  const fn = functions[fnName];
68
+ const result = await fn(...Object.values(JSON.parse(args)));
69
 
70
  messages.push({
71
  role: "assistant",
 
89
  console.error(err);
90
  res.status(500).send({ error: 'Internal Server Error' });
91
  });
92
+ }