Update app.py
app.py CHANGED

@@ -1,5 +1,5 @@
 import streamlit as st
-from transformers import pipeline
+from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
 
 st.set_page_config(page_title="Common NLP Tasks")
 st.title("Common NLP Tasks")

@@ -25,8 +25,10 @@ def summarization_model():
 
 @st.cache(show_spinner=False, allow_output_mutation=True)
 def generation_model():
-    model_name = "distilgpt2"
-
+    # model_name = "distilgpt2"
+    tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
+    model = AutoModelForCausalLM.from_pretrained("distilgpt2")
+    generator = pipeline(model=model, tokenizer=tokenizer, task="text-generation")
     return generator
 
 if option == "Extractive question answering":
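
For context, a minimal sketch of how the updated generation_model() could be exercised in an app like this one. Only the function body and the "Extractive question answering" context line come from the diff; the "Text generation" branch, the task selectbox, the prompt widget, and the max_length value are hypothetical additions for illustration.

import streamlit as st
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM

@st.cache(show_spinner=False, allow_output_mutation=True)
def generation_model():
    # Load the tokenizer and model objects explicitly and hand both to
    # pipeline(), rather than letting pipeline() resolve a model name string.
    tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
    model = AutoModelForCausalLM.from_pretrained("distilgpt2")
    generator = pipeline(model=model, tokenizer=tokenizer, task="text-generation")
    return generator

# Hypothetical task selector and text-generation branch; the real app's
# widgets and option labels are not shown in the diff.
option = st.selectbox("Task", ["Text generation", "Extractive question answering"])
if option == "Text generation":
    prompt = st.text_input("Prompt", "Once upon a time")
    if st.button("Generate"):
        result = generation_model()(prompt, max_length=50)
        st.write(result[0]["generated_text"])

Loading the model and tokenizer via AutoModelForCausalLM and AutoTokenizer, instead of passing only a name string to pipeline(), is a common way to keep direct handles on both objects while still caching the assembled pipeline with st.cache.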