from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
import gradio as grad
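# Pre-trained RoBERTa model fine-tuned for extractive question answering on SQuAD 2.0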
model_name = "deepset/roberta-base-squad2"
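# The pipeline wraps tokenization, model inference, and answer extraction in one call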
qa_pipeline = pipeline("question-answering", model=model_name, tokenizer=model_name)
def answer2question(question, context):
    # Pack the inputs into the dict format the question-answering pipeline expects
    di = {"question": question, "context": context}
    response = qa_pipeline(di)
    return response
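# Two text inputs (question, context) mapped to a single text output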
grad.Interface(answer2question, inputs=["text", "text"], outputs="text").launch()