File size: 1,640 Bytes
bbef364 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 |
import { Message } from '@/types/chat';
import { OllamaModel } from '@/types/ollama';
import { OLLAMA_HOST } from '../app/const';
import {
ParsedEvent,
ReconnectInterval,
createParser,
} from 'eventsource-parser';
/**
 * Error type for failures reported by the Ollama API.
 * Carries the server-provided message and sets a distinct `name`
 * so callers can identify it via `err.name === 'OllamaError'` or
 * `instanceof OllamaError`.
 */
export class OllamaError extends Error {
  constructor(message: string) {
    super(message);
    // Tag the instance so it is distinguishable from a plain Error in logs.
    this.name = 'OllamaError';
  }
}
export const OllamaStream = async (
model: string,
systemPrompt: string,
temperature : number,
prompt: string,
) => {
let url = `${OLLAMA_HOST}/api/generate`;
const res = await fetch(url, {
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
'Cache-Control': 'no-cache',
'Pragma': 'no-cache',
},
method: 'POST',
body: JSON.stringify({
model: model,
prompt: prompt,
system: systemPrompt,
options: {
temperature: temperature,
},
}),
});
const encoder = new TextEncoder();
const decoder = new TextDecoder();
if (res.status !== 200) {
const result = await res.json();
if (result.error) {
throw new OllamaError(
result.error
);
}
}
const responseStream = new ReadableStream({
async start(controller) {
try {
for await (const chunk of res.body as any) {
const text = decoder.decode(chunk);
const parsedData = JSON.parse(text);
if (parsedData.response) {
controller.enqueue(encoder.encode(parsedData.response));
}
}
controller.close();
} catch (e) {
controller.error(e);
}
},
});
return responseStream;
}; |