Spaces:
Paused
Paused
File size: 1,277 Bytes
f6091f7 916e00a f6091f7 2a8011b 916e00a 82ddd60 f6091f7 82ddd60 916e00a e35a983 916e00a 2a8011b 916e00a 7155c10 6f9214c 2a8011b 53763e2 2a8011b 53763e2 e86b927 160e44f 53763e2 2a8011b ac5a6cc 916e00a ac5a6cc 916e00a ac5a6cc 29df9bc 916e00a 2016044 ac5a6cc |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 |
import { GoogleCustomSearch } from "openai-function-calling-tools";
import { LLMError, LLMStream } from './stream';
/**
 * API route handler: answers a user question with an LLM stream that can
 * call a Google Custom Search function tool.
 *
 * @param {import('http').IncomingMessage & { body: { question: string } }} req
 *   Expects `req.body.question` to contain the user's search query.
 * @param {import('http').ServerResponse} res
 *   Model output is streamed to `res` chunk-by-chunk, then the response ends.
 */
const handler = async (req, res) => {
  try {
    // Search tool exposed to the model via function calling.
    // NOTE(review): env var names (API_KEY / CONTEXT_KEY) look generic —
    // confirm they hold the Google API key and CSE id respectively.
    const googleCustomSearch = new GoogleCustomSearch({
      apiKey: process.env.API_KEY,
      googleCSEId: process.env.CONTEXT_KEY,
    });

    const messages = [
      {
        role: "user",
        content: req.body.question,
      },
    ];

    const functions = {
      googleCustomSearch,
    };

    const promptToSend = "You are a helpful assistant, a search term is provided and you are given search results to help provide a useful response.";

    const stream = await LLMStream({ id: "gpt-3.5-turbo" }, promptToSend, 0.8, messages, functions);

    const decoder = new TextDecoder();
    for await (const chunk of stream) {
      // { stream: true } keeps partial multi-byte UTF-8 sequences buffered
      // until the next chunk instead of emitting replacement characters.
      const text = decoder.decode(chunk, { stream: true });
      // Skip literal "null" sentinel chunks emitted by the stream.
      if (text !== 'null') {
        // Fix: write only the NEW chunk. The original accumulated every chunk
        // into `data` and wrote the whole accumulation each iteration,
        // duplicating all earlier text on every write.
        res.write(text);
      }
    }
    return res.end();
  } catch (error) {
    console.error(error);
    // Fix: this is a Node-style (req, res) handler — returning a WHATWG
    // `new Response(...)` (as the original did) is discarded and the client
    // would hang. Report the error through `res` instead; if streaming had
    // already begun, headers are sent and only the body can be terminated.
    if (!res.headersSent) {
      res.statusCode = 500;
      if (error instanceof LLMError) {
        res.statusMessage = error.message;
      }
    }
    return res.end('Error');
  }
};
// Default export so the framework can pick up this route handler.
export default handler;