Build error
Commit · 85ededf
1 Parent(s): 7b473b9
Create app.py
app.py
ADDED
@@ -0,0 +1,59 @@
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, TextIteratorStreamer
import gradio as gr
from threading import Thread

# Select a device: prefer CUDA, then Apple MPS, otherwise fall back to CPU
# so the app can still start on hardware without an accelerator.
device = "cpu"
if torch.cuda.is_available():
    device = "cuda"
elif torch.backends.mps.is_available():
    device = "mps"

theme = gr.themes.Base(
    font=[gr.themes.GoogleFont('Libre Franklin'), gr.themes.GoogleFont('Public Sans'), 'system-ui', 'sans-serif'],
)

# Load Phi-2: half precision on GPU, full precision elsewhere.
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-2",
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
    trust_remote_code=True,
).to(device)

def generate_text(text, temperature, maxLen):
    """Stream generated text incrementally using TextIteratorStreamer."""
    inputs = tokenizer([text], return_tensors="pt").to(device)
    streamer = TextIteratorStreamer(tokenizer)
    generation_kwargs = dict(
        inputs,
        streamer=streamer,
        max_new_tokens=int(maxLen),  # sliders return floats; generate expects an int
        temperature=temperature,
        do_sample=True,  # without sampling, the temperature setting is ignored
    )
    # Run generation in a background thread so partial output can be yielded.
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()
    t = ""
    for out in streamer:
        t += out
        yield t

with gr.Blocks(css="footer{display:none !important}", theme=theme) as demo:
    gr.Markdown("""
# Phi-2 by Microsoft

This model is licensed under the [Microsoft Research License](https://huggingface.co/microsoft/phi-2/resolve/main/LICENSE). You may only use it for non-commercial purposes.

By [mrfakename](https://twitter.com/realmrfakename). Inspired by [@randomblock1's demo](https://huggingface.co/spaces/randomblock1/phi-2).
    """.strip())
    text = gr.Textbox(label="Prompt", lines=10, interactive=True, placeholder="Write a detailed analogy between mathematics and a lighthouse.")
    temp = gr.Slider(label="Temperature", minimum=0.1, maximum=1.5, value=0.7)
    maxlen = gr.Slider(label="Max Length", minimum=4, maximum=512, value=75)
    go = gr.Button("Generate", variant="primary")
    go.click(generate_text, inputs=[text, temp, maxlen], outputs=[text])
    examples = gr.Examples(
        [
            ['Write a detailed analogy between mathematics and a lighthouse.', 0.7, 75],
            ['Instruct: Write a detailed analogy between mathematics and a lighthouse.\nOutput:', 0.7, 75],
            ['Alice: I don\'t know why, I\'m struggling to maintain focus while studying. Any suggestions?\n\nBob: ', 0.6, 150],
            ['''def print_prime(n):
   """
   Print all primes between 1 and n
   """\n''', 0.2, 100],
        ],
        [text, temp, maxlen],
    )

if __name__ == "__main__":
    demo.launch(show_api=False)
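
For reference, a minimal sketch of how the streaming generator above can be driven outside the Gradio UI, assuming the file is saved as app.py and the Phi-2 weights load successfully. The snippet is illustrative usage, not part of the commit; importing app runs the model load, which is slow on first start.

# Hypothetical usage sketch for the generate_text generator defined in app.py.
from app import generate_text

prompt = "Write a detailed analogy between mathematics and a lighthouse."
final = ""
for partial in generate_text(prompt, temperature=0.7, maxLen=75):
    final = partial  # each yield is the full text produced so far, prompt included
print(final)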