matthoffner committed on
Commit
63b93b4
·
1 Parent(s): fce0d00

Update src/pages/api/llm.js

Browse files
Files changed (1) hide show
  1. src/pages/api/llm.js +2 -2
src/pages/api/llm.js CHANGED
@@ -31,7 +31,7 @@ export const config = {
31
 
32
  const handler = async (req) => {
33
  try {
34
- const { model, messages, key, prompt, temperature } = (await req.json());
35
 
36
  await init((imports) => WebAssembly.instantiate(wasm, imports));
37
  const encoding = new Tiktoken(
@@ -68,7 +68,7 @@ const handler = async (req) => {
68
 
69
  encoding.free();
70
 
71
- const stream = await OpenAIStream(model, promptToSend, temperatureToUse, key, messagesToSend);
72
 
73
  return new Response(stream);
74
  } catch (error) {
 
31
 
32
  const handler = async (req) => {
33
  try {
34
+ const { model, messages, prompt, temperature } = req.body;
35
 
36
  await init((imports) => WebAssembly.instantiate(wasm, imports));
37
  const encoding = new Tiktoken(
 
68
 
69
  encoding.free();
70
 
71
+ const stream = await OpenAIStream(model, promptToSend, temperatureToUse, null, messagesToSend);
72
 
73
  return new Response(stream);
74
  } catch (error) {