|
import {Post, Topic} from "./topics"; |
|
import {Settings} from "./settings"; |
|
import {generateUUID} from "./uuids"; |
|
import {tokenizeTopic, tokensToPosts, tokensToTopic} from "./model"; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * One parsed server-sent-event payload from the Oobabooga/OpenAI-compatible
 * `/v1/completions` streaming endpoint. Mirrors the wire format — field names
 * and shapes must match what the server emits exactly.
 */
type OobaboogaStreamChunk = {
    id: string;
    object: string;
    // Unix timestamp (seconds) of chunk creation, per the OpenAI wire format.
    created: number;
    model: string;
    choices: {
        index: number;
        // Null while the stream is still producing tokens; set on the final chunk.
        finish_reason: string | null;
        // The incremental text generated since the previous chunk.
        text: string;
        logprobs: {
            top_logprobs: Record<string, number>[];
        };
    }[];
    // Only present on the final chunk of a stream, if at all.
    usage?: {
        prompt_tokens: number;
        completion_tokens: number;
        total_tokens: number;
    };
};
|
|
|
export async function generateTopic(settings: Settings, nPosts: number): Promise<Topic> { |
|
console.log(settings); |
|
const rawOutput = await fetApiWithStream(settings, "<|start_header_id|>", nPosts); |
|
|
|
|
|
|
|
|
|
return tokensToTopic(rawOutput); |
|
} |
|
|
|
export async function generatePosts(settings: Settings, nPosts: number, topic: Topic): Promise<Post[]> { |
|
|
|
const rawOutput = await fetApiWithStream(settings, tokenizeTopic(topic), nPosts); |
|
|
|
|
|
|
|
|
|
console.log("rawOutput"); |
|
console.log(rawOutput); |
|
|
|
return tokensToPosts(rawOutput); |
|
} |
|
|
|
|
|
|
|
|
|
async function fetApi(settings: Settings): Promise<string> { |
|
const response = await fetch(new URL("/v1/completions", settings.apiURL), { |
|
method: "POST", |
|
headers: { |
|
"Content-Type": "application/json", |
|
}, |
|
body: JSON.stringify({ |
|
prompt: "<|start_header_id|>", |
|
temperature: settings.temperature, |
|
max_tokens: 1000, |
|
stream: false, |
|
skip_special_tokens: false, |
|
stop: "<|end_of_post|>" |
|
|
|
|
|
|
|
}), |
|
}); |
|
|
|
if (response.status !== 200) { |
|
throw new Error(`Failed to fetch API (${response.status}): ${response.statusText}`); |
|
} |
|
|
|
const json = await response.json(); |
|
|
|
console.log(json) |
|
|
|
return json.choices[0].text; |
|
} |
|
|
|
// Special token the model emits at the end of each post; used as a stop
// sequence and to count completed posts while streaming.
const postEndToken = "<|end_of_post|>";
|
|
|
|
|
|
|
async function fetApiWithStream(settings: Settings, prompt: string, nPosts: number): Promise<string> { |
|
const controller = new AbortController() |
|
const response = await fetch(new URL("/v1/completions", settings.apiURL), { |
|
method: "POST", |
|
headers: { |
|
"Content-Type": "application/json", |
|
}, |
|
body: JSON.stringify({ |
|
prompt, |
|
temperature: settings.temperature, |
|
max_tokens: 2000, |
|
stream: true, |
|
skip_special_tokens: false, |
|
|
|
|
|
|
|
|
|
}), |
|
signal: controller.signal, |
|
}); |
|
|
|
if (!response.ok) { |
|
throw new Error(`Failed to fetch API (${response.status} ${response.statusText}): ${await response.text()}`); |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
console.log(`Fetching topic with ${nPosts} posts...`); |
|
|
|
let endTokenCount = 0; |
|
let tokens = ""; |
|
let finishReason: string | null = null; |
|
|
|
try { |
|
await response.body.pipeThrough(new TextDecoderStream("utf-8")).pipeTo(new WritableStream({ |
|
write(rawChunk: string) { |
|
|
|
for (const rawChunkLine of rawChunk.split("\n")) { |
|
if (!rawChunkLine.startsWith("data:")) continue; |
|
const chunk = JSON.parse(rawChunkLine.slice(6)) as OobaboogaStreamChunk; |
|
const text = chunk.choices[0].text; |
|
console.log(text) |
|
tokens += chunk.choices[0].text; |
|
if (text.includes(postEndToken)) { |
|
endTokenCount++; |
|
|
|
if(endTokenCount >= nPosts) { |
|
finishReason = "custom_stop"; |
|
controller.abort(); |
|
break; |
|
} |
|
} else { |
|
finishReason = chunk.choices[0].finish_reason; |
|
} |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
} |
|
})); |
|
} catch (e) { |
|
if (e.name !== 'AbortError') { |
|
throw e; |
|
} |
|
} |
|
|
|
console.log("Done fetching data") |
|
console.log(`Finish reason: ${finishReason}`) |
|
console.log(`Tokens: ${tokens}`) |
|
|
|
return tokens; |
|
} |