# -*- coding: utf-8 -*-
"""Question_Answering_Gradio.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/12ga045iO8c2vMqYZQY4zPttnEv8dIUZe
"""

from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline


model_checkpoint = "Madhana/distilroberta-base-finetuned-wikitext2-SQuAD-qa-WandB2"
new_model = AutoModelForQuestionAnswering.from_pretrained(model_checkpoint)
# The checkpoint was fine-tuned from distilroberta-base, so the base tokenizer matches it.
tokenizer = AutoTokenizer.from_pretrained("distilroberta-base", use_fast=True)
# Build the question-answering pipeline from the fine-tuned model and tokenizer.
qa = pipeline("question-answering", model=new_model, tokenizer=tokenizer)
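
# Optional sanity check before building the UI. The question/context strings below are
# illustrative placeholders only; a question-answering pipeline returns a dict with
# "score", "start", "end", and "answer" keys.
sample = qa(
    question="Who wrote the novel?",
    context="The novel Pride and Prejudice was written by Jane Austen in 1813.",
)
print(sample["answer"])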


import gradio as gr
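
# Gradio calls the handler with the two textbox values and expects a plain string back,
# so this small wrapper (the helper name is our own, not part of the original app)
# extracts just the "answer" field from the pipeline's output dict.
def answer_question(question, context):
    result = qa(question=question, context=context)
    return result["answer"]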

demo = gr.Blocks()

with demo:
    gr.Markdown("Language Model QA Demo")
    with gr.Tabs():
        with gr.TabItem("Question Answering"):
            with gr.Row():
                qa_input = gr.Textbox(label="Question")
                qa_context = gr.Textbox(label="Context")
                qa_output = gr.Textbox(label="Answer")
            qa_button = gr.Button("Answer")

    qa_button.click(answer_question, inputs=[qa_input, qa_context], outputs=qa_output)
    

demo.launch()  # set share=True to get a temporary public link (useful on Colab)