Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -1,17 +1,19 @@
-import
-from transformers import pipeline
+from transformers import AutoModelWithLMHead, AutoTokenizer
 
-
-
-st.title("English to German")
+tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
+model = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
 
-
-
-
-clicked = st.form_submit_button("Submit")
-if clicked:
-    results = classifier([text])
-    st.json(results)
+def get_question(answer, context, max_length=64):
+  input_text = "answer: %s context: %s </s>" % (answer, context)
+  features = tokenizer([input_text], return_tensors='pt')
 
-
-
+  output = model.generate(input_ids=features['input_ids'],
+               attention_mask=features['attention_mask'],
+               max_length=max_length)
+
+  return tokenizer.decode(output[0])
+
+context = "Manuel has created RuPERTa-base with the support of HF-Transformers and Google"
+answer = "Manuel"
+
+get_question(answer, context)
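The updated app.py replaces the Streamlit English-to-German translation form with a plain script that loads mrm8488/t5-base-finetuned-question-generation-ap and generates a question for a given (answer, context) pair. Below is a minimal runnable sketch of the same script, with a few adjustments that are suggestions rather than part of the commit: AutoModelWithLMHead has long been deprecated, so the sketch uses AutoModelForSeq2SeqLM (the current class for T5-style models), decoding passes skip_special_tokens=True to strip the <pad>/</s> markers, and a print() is added because the committed script computes the question but never outputs it.

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

MODEL_ID = "mrm8488/t5-base-finetuned-question-generation-ap"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_ID)

def get_question(answer, context, max_length=64):
    # Prompt format expected by this checkpoint: "answer: ... context: ..."
    input_text = "answer: %s context: %s </s>" % (answer, context)
    features = tokenizer([input_text], return_tensors="pt")

    output = model.generate(
        input_ids=features["input_ids"],
        attention_mask=features["attention_mask"],
        max_length=max_length,
    )

    # skip_special_tokens drops <pad> and </s> from the decoded string
    return tokenizer.decode(output[0], skip_special_tokens=True)

context = "Manuel has created RuPERTa-base with the support of HF-Transformers and Google"
answer = "Manuel"

# Print the generated question so the result is actually visible in the logs
print(get_question(answer, context))

Run as a plain script, this only writes the generated question to the Space logs; it still renders nothing in the Space UI.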
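If the Space should keep serving an interactive page, the removed Streamlit form pattern (st.title, st.form_submit_button, st.json) could be wrapped around the new model. The sketch below is hypothetical and not part of this commit: the widget labels, the load_model helper, and the st.cache_resource caching (which assumes a reasonably recent Streamlit release) are all assumptions.

import streamlit as st
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

MODEL_ID = "mrm8488/t5-base-finetuned-question-generation-ap"

@st.cache_resource  # load the model once per Space process, not on every rerun
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_ID)
    return tokenizer, model

st.title("Question generation")

with st.form("qg_form"):
    context = st.text_area("Context")
    answer = st.text_input("Answer")
    clicked = st.form_submit_button("Submit")

if clicked:
    tokenizer, model = load_model()
    input_text = "answer: %s context: %s </s>" % (answer, context)
    features = tokenizer([input_text], return_tensors="pt")
    output = model.generate(
        input_ids=features["input_ids"],
        attention_mask=features["attention_mask"],
        max_length=64,
    )
    # Show the generated question in the page, mirroring the old st.json output
    st.json({"question": tokenizer.decode(output[0], skip_special_tokens=True)})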