Update app.py
app.py CHANGED

@@ -4,9 +4,9 @@ from transformers import T5Tokenizer, T5ForConditionalGeneration
 
 # xl size run out of memory on 16GB vm
 # All the models have input length of 512 tokens and outputs of 512 tokens
-# small
-# base
-# large
+# small 80M param
+# base 250M
+# large 780M
 # xl
 # xxl
 model_name = "large"
@@ -56,7 +56,7 @@ def text2text(input_text):
 with gr.Blocks() as demo:
     gr.Markdown(
         """
-        # Flan T5 Large Demo
+        # Flan T5 Large Demo (Chat Mode)
         780M parameter Large language model fine tuned on diverse tasks.
         Prompt the model in the Input box. Models output is appended to input. To get additional generation hit submit again.
         """)
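For context, a minimal sketch of what the surrounding app.py presumably looks like, based only on the lines visible in this diff. The checkpoint path (`google/flan-t5-{model_name}`), the generation settings, and the Gradio widget wiring (`Textbox`, `Button`, `demo.launch()`) are assumptions for illustration, not taken from the commit:

```python
# Sketch of the app around this diff. Assumptions: checkpoint names,
# generation parameters, and Gradio wiring are illustrative only.
import gradio as gr
from transformers import T5Tokenizer, T5ForConditionalGeneration

# xl size runs out of memory on a 16GB VM
# small 80M / base 250M / large 780M / xl / xxl
model_name = "large"
tokenizer = T5Tokenizer.from_pretrained(f"google/flan-t5-{model_name}")
model = T5ForConditionalGeneration.from_pretrained(f"google/flan-t5-{model_name}")


def text2text(input_text):
    # All the models have input length of 512 tokens and outputs of 512 tokens
    inputs = tokenizer(input_text, return_tensors="pt",
                       truncation=True, max_length=512)
    outputs = model.generate(**inputs, max_new_tokens=512)
    generated = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # The model's output is appended to the input so that pressing Submit
    # again continues the generation from where it left off.
    return input_text + " " + generated


with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Flan T5 Large Demo (Chat Mode)
        780M parameter Large language model fine tuned on diverse tasks.
        Prompt the model in the Input box. Models output is appended to input. To get additional generation hit submit again.
        """)
    textbox = gr.Textbox(label="Input", lines=8)
    submit = gr.Button("Submit")
    submit.click(fn=text2text, inputs=textbox, outputs=textbox)

demo.launch()
```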