import gradio as gr
from transformers import pipeline

# Load the Hugging Face question-answering model
model = "deepset/bert-base-cased-squad2"
qa_pipeline = pipeline("question-answering", model=model, tokenizer=model)

# Path to the knowledge base text file
knowledge_base_file = "knowledge_base.txt"

# Load the knowledge base from the text file
with open(knowledge_base_file, "r", encoding="utf-8") as f:
    knowledge_base_text = f.read()

# Answer a question using the knowledge base as context
def get_answer(text):
    result = qa_pipeline(question=text, context=knowledge_base_text)
    return result["answer"]

# Wrapper function called by the Gradio interface
def chatbot_interface(text):
    return get_answer(text)

# Create the Gradio interface
iface = gr.Interface(
    fn=chatbot_interface,
    inputs=gr.Textbox(label="Ask any questions about BATB"),
    outputs=gr.Textbox(label="Result"),
    title="British American Tobacco Bangladesh",
    description="- powered by IDT",
)

# Run the Gradio interface
iface.launch()