Hugging Face Space: Budtender LLM (Bilingual QA)
import gradio as gr
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

# 1. Choose a bilingual or multilingual QA model
MODEL_NAME = "mrm8488/xlm-roberta-large-finetuned-squadv2"

# 2. Load model + tokenizer
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForQuestionAnswering.from_pretrained(MODEL_NAME)

# 3. Initialize QA pipeline
qa_pipeline = pipeline("question-answering", model=model, tokenizer=tokenizer)
# 4. Load the custom knowledge base. A missing knowledge.txt at startup is a
#    common cause of a Space crashing with "Runtime error", so fail soft here.
try:
    with open("knowledge.txt", "r", encoding="utf-8") as f:
        knowledge_text = f.read()
except FileNotFoundError:
    knowledge_text = ""

# 5. Answer a question by extracting a span from the knowledge base
def answer_question(question):
    if not question.strip():
        return "Please ask a valid question."
    if not knowledge_text:
        return "Knowledge base is empty: add a knowledge.txt next to app.py."
    try:
        result = qa_pipeline(question=question, context=knowledge_text)
        return result["answer"]
    except Exception as e:
        return f"Error: {e}"
# 6. Build Gradio interface
iface = gr.Interface(
    fn=answer_question,
    inputs=gr.Textbox(lines=2, placeholder="Enter your question here..."),
    outputs="text",
    title="Budtender LLM (Bilingual QA)",
    description=(
        "A bilingual Q&A model trained on Spanish and English data. "
        "Ask your cannabis-related questions here!"
    ),
)

# 7. Launch app
if __name__ == "__main__":
    iface.launch()
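
# Quick smoke test (hypothetical questions; the exact answer depends on what
# knowledge.txt contains, since extractive QA can only return spans that
# already appear in that text):
#
#     $ python app.py                         # starts the Gradio UI
#
#     >>> answer_question("What is CBD?")     # e.g. from a Python shell
#     >>> answer_question("¿Qué es el CBD?")  # Spanish works with XLM-R too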