import gradio as gr
from transformers import pipeline

# Load a question-answering model instead of a text generator
qa_pipeline = pipeline("question-answering", model="deepset/roberta-base-squad2")

def get_answer(question):
    # Define a fixed factual context for the model to extract answers from
    context = """
    Washington, D.C. is the capital of the United States of America.
    """
    answer = qa_pipeline(question=question, context=context)
    return answer["answer"]
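
# Quick sanity check (illustrative addition, not part of the original app):
# calling the function once before the UI starts confirms the model loads
# and extracts an answer from the hardcoded context.
print(get_answer("What is the capital of the United States of America?"))
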
# Create the Gradio interface
iface = gr.Interface(
    fn=get_answer,
    inputs="text",
    outputs="text",
    title="Ask Any Question",
    description="Ask a factual question; the answer is extracted from the built-in context."
)

iface.launch()
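# Note (standard Gradio behavior, stated as an assumption here): launch() starts a
# local server and blocks the script; pass share=True for a temporary public URL:
# iface.launch(share=True)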