import { pipeline, env } from "@xenova/transformers";

/**
 * Lazily-constructed singleton wrapper around a transformers.js `pipeline`.
 *
 * The pipeline is created on first use and re-created whenever the caller
 * requests a different model or task, so the worker only ever holds one
 * pipeline in memory at a time.
 */
export class FlanPipeline {
	// Model id and task the cached pipeline was built for.
	static curr_model = "";
	static curr_task = "";
	// Cached pipeline (a Promise, since `pipeline()` is async); callers await it.
	static instance = null;

	/**
	 * Return the cached pipeline, (re)building it if none exists yet or if the
	 * requested model/task differs from the cached one.
	 *
	 * @param {Function|null} progress_callback - forwarded to `pipeline()`;
	 *   receives model download/progress events.
	 * @param {string} model - model id to load (e.g. a Hugging Face repo name).
	 * @param {string} task - pipeline task name (e.g. "text2text-generation").
	 * @returns {Promise<*>} the pipeline instance.
	 */
	static async getInstance(progress_callback = null, model, task) {
		// Rebuild when nothing is cached yet, or when either the model or the
		// task changed. (The original only compared the model, so switching
		// task on the same model silently reused the wrong pipeline.)
		if (this.instance === null || this.curr_model !== model || this.curr_task !== task) {
			this.instance = pipeline(task, model, { progress_callback });
			this.curr_model = model;
			this.curr_task = task;
		}
		return this.instance;
	}
}

// Listen for messages from the main thread
// Handle generation requests posted by the main thread.
self.addEventListener("message", async (event) => {
	const { model, task, text, max_new_tokens, temperature, id_now, searchID } = event.data;

	// Obtain (or build) the pipeline; download/progress events are relayed
	// to the main thread unchanged.
	const pipe = await FlanPipeline.getInstance(
		(progress) => {
			self.postMessage(progress);
		},
		model,
		task
	);

	// Run generation, streaming each partial decode back as an "update".
	const output = await pipe(text, {
		max_new_tokens,
		temperature,
		callback_function: (beams) => {
			const partial = pipe.tokenizer.decode(beams[0].output_token_ids, {
				skip_special_tokens: true,
			});
			self.postMessage({ status: "update", output: partial, id_now });
		},
	});

	// Deliver the finished result to the main thread.
	self.postMessage({ status: "complete", output, searchID, id_now });
});