Spaces: Paused

matt HOFFNER committed
Commit 916e00a · Parent(s): 834cad6

cleanup

Browse files:
- src/app/search/web/page.jsx +53 -1
- src/pages/api/llm.js +68 -77
- src/{stream.js → pages/api/stream.js} +3 -3
src/app/search/web/page.jsx
CHANGED
@@ -42,5 +42,57 @@ export default function WebSearchPage({ searchParams }) {
 
   console.log(aiResponse);
 
-  return
+  return (
+    <div className="flex flex-row">
+      <MemoizedReactMarkdown
+        className="prose dark:prose-invert flex-1"
+        components={{
+          code({ node, inline, className, children, ...props }) {
+            if (children.length) {
+              if (children[0] == '▍') {
+                return <span className="animate-pulse cursor-default mt-1">▍</span>
+              }
+              children[0] = (children[0]).replace("`▍`", "▍")
+            }
+            const match = /language-(\w+)/.exec(className || '');
+            return !inline ? (
+              <CodeBlock
+                key={Math.random()}
+                language={(match && match[1]) || ''}
+                value={String(children).replace(/\n$/, '')}
+                {...props}
+              />
+            ) : (
+              <code className={className} {...props}>
+                {children}
+              </code>
+            );
+          },
+          table({ children }) {
+            return (
+              <table className="border-collapse border border-black px-3 py-1 dark:border-white">
+                {children}
+              </table>
+            );
+          },
+          th({ children }) {
+            return (
+              <th className="break-words border border-black bg-gray-500 px-3 py-1 text-white dark:border-white">
+                {children}
+              </th>
+            );
+          },
+          td({ children }) {
+            return (
+              <td className="break-words border border-black px-3 py-1 dark:border-white">
+                {children}
+              </td>
+            );
+          },
+        }}
+      >
+        {aiResponse}
+      </MemoizedReactMarkdown>
+    </div>
+  );
 }
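Note: `MemoizedReactMarkdown` and `CodeBlock` are imported elsewhere in the app and are not part of this diff. The `▍` checks above render a pulsing block cursor while tokens are still streaming in, instead of treating the placeholder as inline code. For reference, a minimal sketch of how the memoized markdown wrapper is typically defined — this is an assumption based on the common react-markdown pattern, not code from this commit:

// Hypothetical sketch, not from this commit: re-render the markdown tree
// only when the streamed text (children) actually changes.
import { memo } from 'react';
import ReactMarkdown from 'react-markdown';

export const MemoizedReactMarkdown = memo(
  ReactMarkdown,
  (prevProps, nextProps) => prevProps.children === nextProps.children,
);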
src/pages/api/llm.js
CHANGED
@@ -1,92 +1,83 @@
-import { Configuration, OpenAIApi } from "openai";
 import { GoogleCustomSearch } from "openai-function-calling-tools";
+import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const';
+import { LLMError, LLMStream } from './stream';
 
-
-
-  res.status(405).send({ error: 'Method Not Allowed', method: req.method });
-  return;
-}
+// @ts-expect-error
+import wasm from '../../node_modules/@dqbd/tiktoken/lite/tiktoken_bg.wasm?module';
 
-
+import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json';
+import { Tiktoken, init } from '@dqbd/tiktoken/lite/init';
 
-
-
-
-
+export const config = {
+  runtime: 'edge',
+};
+
+const handler = async (req) => {
+  try {
+    const { question } = (await req.json());
 
-
-
-
-
-
-  const messages = [
-    {
-      role: "user",
-      content: QUESTION,
-    },
-  ];
-
-  const googleCustomSearch = new GoogleCustomSearch({
-    apiKey: process.env.API_KEY,
-    googleCSEId: process.env.CONTEXT_KEY,
-  });
-
-  const functions = {
-    googleCustomSearch,
-  };
-
-  const getCompletion = async (messages) => {
-    const response = await openai.createChatCompletion({
-      model: "gpt-3.5-turbo-0613",
-      messages,
-      functions: [googleCustomSearchSchema],
-      temperature: 0,
-      stream: true
+    await init((imports) => WebAssembly.instantiate(wasm, imports));
+
+    const googleCustomSearch = new GoogleCustomSearch({
+      apiKey: process.env.API_KEY,
+      googleCSEId: process.env.CONTEXT_KEY,
     });
 
-
-
+    const messages = [
+      {
+        role: "user",
+        content: question,
+      },
+    ];
+
+    const functions = {
+      googleCustomSearch,
+    };
+    const encoding = new Tiktoken(
+      tiktokenModel.bpe_ranks,
+      tiktokenModel.special_tokens,
+      tiktokenModel.pat_str,
+    );
+
+    let promptToSend = question;
+    if (!promptToSend) {
+      promptToSend = DEFAULT_SYSTEM_PROMPT;
+    }
+
+    let temperatureToUse = temperature;
+    if (temperatureToUse == null) {
+      temperatureToUse = DEFAULT_TEMPERATURE;
+    }
 
-
-  res.setHeader('Cache-Control', 'no-cache');
-  res.setHeader('Connection', 'keep-alive');
+    const prompt_tokens = encoding.encode(promptToSend);
 
-
-  let
+    let tokenCount = prompt_tokens.length;
+    let messagesToSend = [];
 
-
-
+    for (let i = messages.length - 1; i >= 0; i--) {
+      const message = messages[i];
+      const tokens = encoding.encode(message.content);
 
-      if (
-        res.write(`data: ${JSON.stringify({ result: response.data.choices[0].message.content })}\n\n`);
+      if (tokenCount + tokens.length + 1000 > model.tokenLimit) {
         break;
-      } else if (response.data.choices[0].finish_reason === "function_call") {
-        const fnName = response.data.choices[0].message.function_call.name;
-        const args = response.data.choices[0].message.function_call.arguments;
-
-        const fn = functions[fnName];
-        const result = await fn(...Object.values(JSON.parse(args)));
-
-        messages.push({
-          role: "assistant",
-          content: "",
-          function_call: {
-            name: fnName,
-            arguments: args,
-          },
-        });
-
-        messages.push({
-          role: "function",
-          name: fnName,
-          content: JSON.stringify({ result: result }),
-        });
       }
+      tokenCount += tokens.length;
+      messagesToSend = [message, ...messagesToSend];
     }
-  }
 
-
-  console.error(err);
-  res.status(500).send({ error: 'Internal Server Error' });
-  });
-}
+    encoding.free();
+
+    const stream = await LLMStream(model, promptToSend, temperatureToUse, key, messagesToSend, functions);
+
+    return new Response(stream);
+  } catch (error) {
+    console.error(error);
+    if (error instanceof LLMError) {
+      return new Response('Error', { status: 500, statusText: error.message });
+    } else {
+      return new Response('Error', { status: 500 });
     }
+  }
+};
 
+export default handler;
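The new handler moves the route to the edge runtime and returns a streamed `Response` instead of writing SSE frames through `res`. (Note that `temperature`, `model`, and `key` are referenced in the new handler but not defined anywhere visible in this diff.) A minimal sketch of how a client such as the search page might consume this route — the `/api/llm` path and `{ question }` body follow the handler above; the function name and callback are assumptions for illustration:

// Hypothetical client-side consumer of the edge route above: POST a
// question and surface the streamed text incrementally.
async function askQuestion(question, onText) {
  const res = await fetch('/api/llm', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ question }),
  });
  if (!res.ok || !res.body) throw new Error(res.statusText || 'request failed');

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let text = '';
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    text += decoder.decode(value, { stream: true });
    onText(text); // e.g. setAiResponse(text) in the page component
  }
  return text;
}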
src/{stream.js → pages/api/stream.js}
RENAMED
@@ -1,10 +1,10 @@
-import { AZURE_DEPLOYMENT_ID, OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from './
+import { AZURE_DEPLOYMENT_ID, OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from './llm';
 import { createParser } from 'eventsource-parser';
 
-export class
+export class LLMError extends Error {
   constructor(message, type, param, code) {
     super(message);
-    this.name = '
+    this.name = 'LLMError';
     this.type = type;
     this.param = param;
     this.code = code;
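`LLMError` carries the `type`/`param`/`code` fields of an OpenAI-style error payload, which is what lets the llm.js handler above map it to a 500 with a meaningful `statusText`. A hypothetical sketch of how `LLMStream` might raise it when the upstream API rejects a request — the error-JSON shape follows OpenAI's API; the helper itself is assumed, not shown in this diff:

// Hypothetical helper alongside the LLMError class above: convert a failed
// upstream response into an LLMError before any tokens are streamed.
async function throwIfError(res) {
  if (res.status !== 200) {
    const json = await res.json();
    if (json.error) {
      throw new LLMError(
        json.error.message,
        json.error.type,
        json.error.param,
        json.error.code,
      );
    }
    throw new Error(`Upstream API returned ${res.status}`);
  }
}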