import gradio as gr
from transformers import pipeline

# Load the Hugging Face extractive question-answering model
model = "deepset/bert-base-cased-squad2"
qa_pipeline = pipeline("question-answering", model=model, tokenizer=model)

# Load the knowledge base from a text file
knowledge_base_file = "knowledge_base.txt"  # Path to the knowledge base text file
with open(knowledge_base_file, "r") as f:
    knowledge_base_text = f.read()

# Define the function to answer questions against the knowledge base
def get_answer(text):
    result = qa_pipeline({
        "question": text,
        "context": knowledge_base_text,
    })
    return result["answer"]

# Gradio callback: takes the user's question and returns the extracted answer
def chatbot_interface(text):
    return get_answer(text)

# Create the Gradio interface
iface = gr.Interface(
    fn=chatbot_interface,
    inputs=gr.Textbox(label="Ask any questions about BATB"),
    outputs=gr.Textbox(label="Result"),
    title="British American Tobacco Bangladesh",
    description="- powered by IDT",
    theme="default",
)

# Run the Gradio interface
iface.launch()
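
For quick local testing, the QA pipeline can be exercised directly without launching the Gradio UI. A minimal sketch, assuming knowledge_base.txt sits next to the script and contains a few plain-text sentences about BATB; the sample question is illustrative only and not part of the Space:

# Hypothetical smoke test (assumes the script above has already run its setup code)
sample = qa_pipeline({
    "question": "Where is British American Tobacco Bangladesh based?",
    "context": knowledge_base_text,
})
print(sample["answer"], sample["score"])  # the pipeline also returns start/end character offsets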