Update app.py
app.py CHANGED
@@ -1,5 +1,8 @@
 import torch
 import spaces
+from transformers import AutoModelForCausalLM, AutoTokenizer
+import numpy as np
+
 print(f"Is CUDA available: {torch.cuda.is_available()}")
 # True
 if torch.cuda.is_available():
@@ -111,11 +114,9 @@ STYLE = """
 }
 """
 
-from transformers import GPT2Tokenizer, AutoModelForCausalLM, AutoTokenizer
-import numpy as np
-
 tokenizer = AutoTokenizer.from_pretrained("gpt2")
 model = AutoModelForCausalLM.from_pretrained("gpt2")
+
 tokenizer.pad_token_id = tokenizer.eos_token_id
 print("Loading finished.")
 def generate_html(token, node):
@@ -226,7 +227,7 @@ with gr.Blocks(
     ),
     css=STYLE,
 ) as demo:
-    text = gr.Textbox(label="Sentence to decode from
+    text = gr.Textbox(label="Sentence to decode from", value="Today is")
     steps = gr.Slider(label="Number of steps", minimum=1, maximum=10, step=1, value=4)
     beams = gr.Slider(label="Number of beams", minimum=1, maximum=3, step=1, value=3)
     button = gr.Button()
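
For context, a minimal sketch of how the module-level setup reads after this change. The Gradio UI, generate_html, and the rest of app.py are omitted; the generation call at the end is an illustrative assumption (it is not part of the diff) and simply exercises the loaded GPT-2 model with beam-search settings matching the demo's default sliders.

import torch
# import spaces  # only available inside the Hugging Face Spaces runtime
from transformers import AutoModelForCausalLM, AutoTokenizer
import numpy as np

print(f"Is CUDA available: {torch.cuda.is_available()}")

# Load GPT-2 once at module level, as app.py does after the CSS block.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")
# GPT-2 ships without a pad token; reuse EOS so padded/beam generation works.
tokenizer.pad_token_id = tokenizer.eos_token_id
print("Loading finished.")

# Illustrative only (not in the diff): beam search over the demo's default prompt.
inputs = tokenizer(["Today is"], return_tensors="pt")
out = model.generate(**inputs, num_beams=3, num_return_sequences=3, max_new_tokens=4)
print(tokenizer.batch_decode(out, skip_special_tokens=True))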