File size: 3,760 Bytes
29df9bc
 
888d022
 
 
916e00a
29df9bc
 
916e00a
29df9bc
 
 
 
 
 
5b2069d
29df9bc
 
 
82ddd60
 
29df9bc
15408a7
29df9bc
 
 
 
 
284c9cc
888d022
29df9bc
 
 
 
 
 
 
 
 
 
 
 
 
15408a7
2a8011b
29df9bc
 
 
 
 
 
 
 
 
888d022
29df9bc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15408a7
 
 
 
 
f6091f7
29df9bc
 
2a8011b
29df9bc
ac5a6cc
d73f75f
 
15408a7
29df9bc
15408a7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
74edd3f
 
53763e2
048048c
15408a7
 
 
 
 
 
 
 
 
2a8011b
29df9bc
284c9cc
29df9bc
 
 
 
15408a7
29df9bc
15408a7
29df9bc
 
 
 
 
15408a7
29df9bc
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
import { createParser } from 'eventsource-parser';

// Base URL of the OpenAI-compatible endpoint; override via OPENAI_API_HOST.
// NOTE: `||` (not `??`) is intentional so an empty env var also falls back.
export const OPENAI_API_HOST = process.env.OPENAI_API_HOST || "https://api.openai.com";
// API flavor selector (e.g. "openai" vs. "azure"); defaults to "openai".
export const OPENAI_API_TYPE = process.env.OPENAI_API_TYPE || "openai";

/**
 * Error raised when the LLM API responds with a structured error payload.
 * Mirrors the OpenAI error object fields so callers can branch on them.
 */
export class LLMError extends Error {
  /**
   * @param {string} message - Human-readable error description.
   * @param {string} type - Error category from the API (e.g. "invalid_request_error").
   * @param {string|null} param - Request parameter the error refers to, if any.
   * @param {string|null} code - Machine-readable error code, if any.
   */
  constructor(message, type, param, code) {
    super(message);
    this.name = 'LLMError';
    Object.assign(this, { type, param, code });
  }
}

/**
 * Streams a chat completion from an OpenAI-compatible API as a ReadableStream
 * of UTF-8 encoded text chunks. Supports a single registered function
 * (google custom search): when the model requests a function call, the
 * function is executed server-side and its result is enqueued into the stream.
 *
 * @param {{id: string}} model - Model descriptor; `model.id` is sent when OPENAI_API_TYPE is 'openai'.
 * @param {string} systemPrompt - Content of the leading system message.
 * @param {number} temperature - Sampling temperature forwarded to the API.
 * @param {Array<object>} messages - Prior conversation messages (OpenAI message shape).
 * @param {object} functions - Registry of callable tools; must contain
 *   `googleCustomSearch.googleCustomSearchSchema` and, keyed by the schema's
 *   function name, the async implementation itself.
 * @returns {Promise<ReadableStream>} Stream of encoded response text.
 * @throws {LLMError} When the API returns a structured error object.
 * @throws {Error} When the API returns a non-200 status without an error object.
 */
export const LLMStream = async (
  model,
  systemPrompt,
  temperature,
  messages,
  functions
) => {

  const url = `${OPENAI_API_HOST}/v1/chat/completions`;
  const res = await fetch(url, {
    headers: {
      'Content-Type': 'application/json',
      ...(OPENAI_API_TYPE === 'openai' && {
        Authorization: `Bearer ${process.env.OPENAI_API_KEY}`
      })
    },
    method: 'POST',
    body: JSON.stringify({
      ...(OPENAI_API_TYPE === 'openai' && { model: model.id }),
      messages: [
        {
          role: 'system',
          content: systemPrompt,
        },
        ...messages,
      ],
      max_tokens: 1000,
      temperature: temperature,
      functions: [functions['googleCustomSearch']['googleCustomSearchSchema']],
      stream: true,
    }),
  });

  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  if (res.status !== 200) {
    // Try to surface the structured error body; fall back to HTTP status text.
    const result = await res.json().catch(() => null);
    if (result?.error) {
      throw new LLMError(
        result.error.message,
        result.error.type,
        result.error.param,
        result.error.code,
      );
    }
    // BUG FIX: the old code called decoder.decode() on parsed JSON and read
    // statusText off the body; the status text lives on the Response object.
    throw new Error(
      `OpenAI API returned an error: ${res.status} ${res.statusText}`,
    );
  }

  const stream = new ReadableStream({
    async start(controller) {
      // Accumulates a streamed function call: the name arrives once, the
      // arguments arrive as incremental JSON fragments across many deltas.
      const func_call = {
        name: null,
        arguments: '',
      };

      const onParse = async (event) => {
        if (event.type !== 'event') {
          return;
        }
        const data = event.data;

        try {
          if (data === '[DONE]' || !data) {
            return;
          }

          const json = JSON.parse(data);

          if (!Array.isArray(json.choices) || json.choices.length === 0) {
            console.error('No choices found in json');
            return;
          }

          const choice = json.choices[0];
          const delta = choice.delta;

          if (choice.finish_reason === 'stop') {
            controller.close();
            return;
          }

          // BUG FIX: `delta` may be absent on finish chunks — guard before
          // dereferencing (the original threw a TypeError here).
          if (delta?.function_call) {
            if (Object.hasOwn(delta.function_call, 'name')) {
              func_call.name = delta.function_call.name;
            }
            if (Object.hasOwn(delta.function_call, 'arguments')) {
              func_call.arguments += delta.function_call.arguments;
            }
          }

          if (choice.finish_reason === 'function_call') {
            // Execute the accumulated function call and stream its result.
            const fn = functions[func_call.name]?.[func_call.name];
            if (typeof fn !== 'function') {
              throw new Error(`Unknown function requested by model: ${func_call.name}`);
            }
            const funcResult = await fn(func_call.arguments);
            controller.enqueue(encoder.encode(funcResult));
            // BUG FIX: close the stream — the API sends no 'stop' after a
            // function_call finish, so the original stream never terminated.
            controller.close();
            return;
          }

          if (delta && 'content' in delta) {
            controller.enqueue(encoder.encode(delta.content));
          }
        } catch (e) {
          console.log(e);
          controller.error(e);
        }
      };

      const parser = createParser(onParse);

      for await (const chunk of res.body) {
        parser.feed(decoder.decode(chunk));
      }
    },
  });

  return stream;
};