import { DefaultSystem } from '../utils/constant';
import { IChatInputMessage, IStreamHandler } from '../interface';
import { BaseChat } from './base/base';
import { Ollama } from 'ollama';
// Ollama server endpoint; defaults to the local daemon unless OLLAMA_HOST is set.
// NOTE: '||' (not '??') is deliberate here — an empty-string env var also falls
// back to the default, matching the original behavior.
const host = process.env.OLLAMA_HOST || 'http://localhost:11434';
// Single shared client instance used by every method in this module.
const ollamaClient = new Ollama({ host });
/**
 * Run large language models locally with Ollama.
 */
export class OllamaChat implements BaseChat {
  public platform = 'ollama';

  /**
   * One-shot (non-streaming) chat completion against the local Ollama server.
   *
   * @param messages conversation history; NOT mutated — when `system` is
   *   non-empty a system message is prepended to a copy instead
   * @param model Ollama model tag (defaults to 'llama2')
   * @param system system prompt; a falsy value skips the system message
   * @returns the assistant message content from the model
   */
  public async chat(
    messages: IChatInputMessage[],
    model = 'llama2',
    system = DefaultSystem
  ): Promise<string | null> {
    // Build a new array instead of unshift-ing into the caller's array:
    // mutating the parameter duplicated the system prompt when the same
    // messages array was reused (e.g. on retries).
    const finalMessages: IChatInputMessage[] = system
      ? [{ role: 'system', content: system }, ...messages]
      : messages;
    const response = await ollamaClient.chat({
      model,
      messages: finalMessages
    });
    return response.message.content;
  }

  /**
   * Streaming chat completion.
   *
   * @param messages conversation history; NOT mutated (see {@link chat})
   * @param onMessage invoked with each content chunk and `false`, then once
   *   with `null` and `true` when the stream is exhausted
   * @param model Ollama model tag (defaults to 'llama2')
   * @param system system prompt; a falsy value skips the system message
   */
  public async chatStream(
    messages: IChatInputMessage[],
    onMessage: IStreamHandler,
    model = 'llama2',
    system = DefaultSystem
  ): Promise<void> {
    // Same copy-on-prepend as chat(): never mutate the caller's array.
    const finalMessages: IChatInputMessage[] = system
      ? [{ role: 'system', content: system }, ...messages]
      : messages;
    const response = await ollamaClient.chat({
      model,
      stream: true,
      messages: finalMessages
    });
    for await (const chunk of response) {
      onMessage?.(chunk.message.content, false);
    }
    // Signal end-of-stream to the consumer.
    onMessage?.(null, true);
  }

  /** List the models installed on the local Ollama server. */
  public async list() {
    return ollamaClient.list();
  }
}
// Shared singleton instance of the Ollama provider for module consumers.
export const ollama = new OllamaChat();