// serp-chat/src/stream.js
// matt HOFFNER — refactor to use openai functions (commit f6091f7)
import { AZURE_DEPLOYMENT_ID, OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from './pages/api/llm';
import { createParser } from 'eventsource-parser';
/**
 * Error raised for failures reported by the OpenAI API.
 * Mirrors the API's structured error payload so callers can inspect
 * `type`, `param`, and `code` alongside the human-readable message.
 */
export class OpenAIError extends Error {
  /**
   * @param {string} message - Human-readable error description.
   * @param {string} type - OpenAI error type (e.g. "invalid_request_error").
   * @param {string} param - Request parameter the error relates to, if any.
   * @param {string} code - Machine-readable error code, if any.
   */
  constructor(message, type, param, code) {
    super(message);
    this.name = 'OpenAIError';
    Object.assign(this, { type, param, code });
  }
}
/**
 * Issue a streaming chat-completion request against the OpenAI (or Azure
 * OpenAI) API and return a ReadableStream of UTF-8 encoded content deltas.
 *
 * @param {{id: string}} model - Model descriptor; `model.id` is sent for the
 *   non-Azure API (Azure selects the model via the deployment URL).
 * @param {string} systemPrompt - Content of the leading system message.
 * @param {number} temperature - Sampling temperature forwarded to the API.
 * @param {string} key - API key; falls back to process.env.OPENAI_API_KEY.
 * @param {Array<object>} messages - Prior chat messages, appended after the
 *   system message. Mutated in place when a function call is handled.
 * @returns {Promise<ReadableStream<Uint8Array>>} Stream of content deltas.
 * @throws {OpenAIError} When the API returns a structured error payload.
 * @throws {Error} When the API returns a non-200 response without one.
 */
export const OpenAIStream = async (
  model,
  systemPrompt,
  temperature,
  key,
  messages
) => {
  let url = `${OPENAI_API_HOST}/v1/chat/completions`;
  if (OPENAI_API_TYPE === 'azure') {
    // Azure routes by deployment id and requires an explicit api-version.
    url = `${OPENAI_API_HOST}/openai/deployments/${AZURE_DEPLOYMENT_ID}/chat/completions?api-version=${OPENAI_API_VERSION}`;
  }
  const res = await fetch(url, {
    headers: {
      'Content-Type': 'application/json',
      // OpenAI proper authenticates via Bearer token; Azure via `api-key`.
      ...(OPENAI_API_TYPE === 'openai' && {
        Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
      }),
      ...(OPENAI_API_TYPE === 'azure' && {
        'api-key': `${key ? key : process.env.OPENAI_API_KEY}`
      }),
      ...((OPENAI_API_TYPE === 'openai' && OPENAI_ORGANIZATION) && {
        'OpenAI-Organization': OPENAI_ORGANIZATION,
      }),
    },
    method: 'POST',
    body: JSON.stringify({
      // Azure infers the model from the deployment; only send it otherwise.
      ...(OPENAI_API_TYPE === 'openai' && {model: model.id}),
      messages: [
        {
          role: 'system',
          content: systemPrompt,
        },
        ...messages,
      ],
      max_tokens: 1000,
      temperature: temperature,
      stream: true,
    }),
  });

  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  if (res.status !== 200) {
    const result = await res.json();
    if (result.error) {
      throw new OpenAIError(
        result.error.message,
        result.error.type,
        result.error.param,
        result.error.code,
      );
    } else {
      // FIX: `result` is the parsed JSON body — `statusText` lives on the
      // Response object, and decoding `result.value` always yielded ''.
      throw new Error(
        `OpenAI API returned an error: ${res.statusText || res.status}`,
      );
    }
  }

  const stream = new ReadableStream({
    async start(controller) {
      // Guards against touching the controller after close/error — the
      // parser may still feed events (e.g. the trailing [DONE] sentinel).
      let closed = false;

      const onParse = async (event) => {
        if (event.type !== 'event' || closed) {
          return;
        }
        const data = event.data;
        // FIX: the API terminates the SSE stream with a literal "[DONE]"
        // sentinel, which is not JSON. Previously it hit JSON.parse, threw,
        // and controller.error was called on an already-closed controller.
        if (data === '[DONE]') {
          closed = true;
          controller.close();
          return;
        }
        try {
          const json = JSON.parse(data);
          const choice = json.choices[0];
          if (choice.finish_reason === 'stop') {
            closed = true;
            controller.close();
            return;
          } else if (choice.finish_reason === 'function_call') {
            // NOTE(review): streamed chunks normally carry `delta`, not
            // `message` — confirm this path against the actual payload shape.
            const fnName = choice.message.function_call.name;
            const args = choice.message.function_call.arguments;
            // NOTE(review): `functions` (name → implementation map) must be
            // defined elsewhere in this module — confirm.
            const fn = functions[fnName];
            const result = await fn(...Object.values(JSON.parse(args)));
            console.log(`Function call: ${fnName}, Arguments: ${args}`);
            console.log(`Calling Function ${fnName} Result: ` + result);
            // Record the assistant's call and the function's output so a
            // follow-up completion can incorporate the result.
            messages.push({
              role: "assistant",
              content: "",
              function_call: {
                name: fnName,
                arguments: args,
              },
            });
            messages.push({
              role: "function",
              name: fnName,
              content: JSON.stringify({ result: result }),
            });
          }
          // FIX: role-only / function-call deltas carry no content; skip
          // them rather than enqueueing empty chunks, and don't crash when
          // `delta` itself is absent.
          const text = choice.delta?.content;
          if (text != null) {
            controller.enqueue(encoder.encode(text));
          }
        } catch (e) {
          closed = true;
          controller.error(e);
        }
      };

      const parser = createParser(onParse);
      for await (const chunk of res.body) {
        // FIX: { stream: true } keeps multi-byte UTF-8 sequences that
        // straddle chunk boundaries intact instead of emitting U+FFFD.
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    },
  });

  return stream;
};