import { buildPrompt } from "$lib/buildPrompt";
import { z } from "zod";
import type { Endpoint } from "../endpoints";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import { logger } from "$lib/server/logger";
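
// Configuration schema for a LangServe endpoint entry. `url` is the base URL
// of the LangServe route; the `/stream` path is appended when calling it.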
export const endpointLangserveParametersSchema = z.object({
	weight: z.number().int().positive().default(1),
	model: z.any(),
	type: z.literal("langserve"),
	url: z.string().url(),
});
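
/**
 * Creates an `Endpoint` that streams generations from a LangServe server by
 * POSTing the built prompt to `<url>/stream` and translating the resulting
 * server-sent events into `TextGenerationStreamOutput` tokens.
 */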
export function endpointLangserve(
	input: z.input<typeof endpointLangserveParametersSchema>
): Endpoint {
	const { url, model } = endpointLangserveParametersSchema.parse(input);

	return async ({ messages, preprompt, continueMessage }) => {
		const prompt = await buildPrompt({
			messages,
			continueMessage,
			preprompt,
			model,
		});
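
		// LangServe exposes streaming at `<url>/stream`; the `{ text: prompt }`
		// input shape assumes a chain with a single `text` input key.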
		const r = await fetch(`${url}/stream`, {
			method: "POST",
			headers: {
				"Content-Type": "application/json",
			},
			body: JSON.stringify({
				input: { text: prompt },
			}),
		});

		if (!r.ok) {
			throw new Error(`Failed to generate text: ${await r.text()}`);
		}
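
		// Pipe the response body through a text decoder so the server-sent
		// events can be parsed as strings.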
		const decoder = new TextDecoderStream();
		const reader = r.body?.pipeThrough(decoder).getReader();

		return (async function* () {
			let stop = false;
			let generatedText = "";
			let tokenId = 0;
			let accumulatedData = ""; // Buffer to accumulate data chunks

			while (!stop) {
				// Read the next decoded chunk from the stream
				const out = (await reader?.read()) ?? { done: false, value: undefined };
				// If the stream is done, cancel the reader and stop generating
				if (out.done) {
					reader?.cancel();
					return;
				}

				if (!out.value) {
					return;
				}

				// Accumulate the data chunk
				accumulatedData += out.value;
				// Keep the raw chunk to check the SSE event type
				const eventData = out.value;
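				// Note: the event type is checked against the raw chunk, which
				// assumes a complete `event:`/`data:` pair arrives in a single read.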

				// Process each complete line in the accumulated data
				while (accumulatedData.includes("\n")) {
					// Each SSE line ends with a newline
					const endIndex = accumulatedData.indexOf("\n");
					let jsonString = accumulatedData.substring(0, endIndex).trim();

					// Remove the processed part from the buffer
					accumulatedData = accumulatedData.substring(endIndex + 1);

					// On the end event, emit a final special token carrying the
					// full generated text, then stop
					if (eventData.startsWith("event: end")) {
						stop = true;
						yield {
							token: {
								id: tokenId++,
								text: "",
								logprob: 0,
								special: true,
							},
							generated_text: generatedText,
							details: null,
						} satisfies TextGenerationStreamOutput;
						reader?.cancel();
						continue;
					}
if (eventData.startsWith("event: data") && jsonString.startsWith("data: ")) {
jsonString = jsonString.slice(6);
let data = null;
// Handle the parsed data
try {
data = JSON.parse(jsonString);
} catch (e) {
logger.error(e, "Failed to parse JSON");
logger.error(jsonString, "Problematic JSON string:");
continue; // Skip this iteration and try the next chunk
}

						// Assuming the content within data is a plain string
						if (data) {
							generatedText += data;
							const output: TextGenerationStreamOutput = {
								token: {
									id: tokenId++,
									text: data,
									logprob: 0,
									special: false,
								},
								generated_text: null,
								details: null,
							};
							yield output;
						}
					}
				}
			}
		})();
	};
}

export default endpointLangserve;
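
/*
 * Usage sketch (a minimal, hypothetical example: it assumes `model` and
 * `messages` come from the surrounding chat-ui code and that a LangServe
 * chain is actually running at the given URL):
 *
 *   const endpoint = endpointLangserve({
 *     type: "langserve",
 *     url: "http://localhost:8001/my-chain",
 *     weight: 1,
 *     model,
 *   });
 *
 *   const generator = await endpoint({ messages, preprompt: "", continueMessage: false });
 *   for await (const output of generator) {
 *     process.stdout.write(output.token.text);
 *   }
 */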