basic_llm / app.py
import gradio as gr
from transformers import pipeline
# Load the QA model only once at the start
qa_pipeline = pipeline("question-answering", model="deepset/roberta-base-squad2")
def get_answer(question):
    # Predefined factual context
    context = """
    Washington, D.C. is the capital of the United States of America.
    New Delhi is the capital of India.
    London is the capital of the United Kingdom.
    """
    answer = qa_pipeline(question=question, context=context)
    return answer["answer"]
# Create Gradio Interface
iface = gr.Interface(
    fn=get_answer,
    inputs="text",
    outputs="text",
    title="Ask Any Question",
    description="Ask factual questions and get precise answers."
)
iface.launch()
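
# A minimal sketch of what the underlying pipeline call returns, assuming the
# standard transformers question-answering output format; get_answer() above
# extracts only the "answer" key. The score and character offsets shown are
# illustrative, not actual model output.
#
# >>> qa_pipeline(question="What is the capital of India?",
# ...             context="New Delhi is the capital of India.")
# {'score': 0.97, 'start': 0, 'end': 9, 'answer': 'New Delhi'}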