micole66 commited on
Commit
35c8b1c
·
1 Parent(s): f8f8199

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -1
app.py CHANGED
@@ -1,3 +1,34 @@
1
  import gradio as gr
 
 
2
 
3
- gr.Interface.load(("huggingface/bigscience/bloomz"), allow_flagging=True, enable_queue=True, api_key="<REDACTED — a real HF org token was committed here; it is in git history and must be revoked>").launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Model checkpoint. "bigscience/bloomz" targets English prompts;
# swap to "bigscience/bloomz-7b1-mt" for non-English use.
checkpoint = "bigscience/bloomz"  # english

# Run on GPU when available, otherwise CPU.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
# BUG FIX: the model must live on the same device as the inputs.
# The original left the model on CPU while moving the encoded inputs
# to `device`, which crashes on any CUDA machine.
model = AutoModelForCausalLM.from_pretrained(checkpoint).to(device)


def get_result(prompt):
    """Generate a Bloomz completion for the given prompt.

    Parameters
    ----------
    prompt : str
        The instruction/prompt typed by the user.

    Returns
    -------
    str
        The decoded model output (prompt plus continuation) with
        special tokens stripped.
    """
    inputs = tokenizer.encode(prompt, return_tensors="pt").to(device)
    # BUG FIX: the original used max_length=len(prompt)+1000, mixing
    # *character* count with *token* count. max_new_tokens bounds only
    # the generated continuation, independent of prompt length.
    with torch.no_grad():  # inference only — skip autograd bookkeeping
        outputs = model.generate(inputs, max_new_tokens=1000)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


title = "Bloomz (english small)"
description = "Write an instruction and get the Bloomz result."
examples = [["Translate to English: Je t'aime."]]

demo = gr.Interface(
    fn=get_result,
    inputs="text",
    outputs="text",
    title=title,
    description=description,
    examples=examples,
    allow_flagging="never",
)

demo.launch()