import gradio as gr
from transformers import pipeline

# Load your fine-tuned model from the Hub
chatbot = pipeline(
    "text2text-generation",
    model="abinashnp/bayedger-chatbot",      # or your fine-tuned model ID
    tokenizer="abinashnp/bayedger-chatbot",
)

def respond(query):
    # Generate an answer from the model
    out = chatbot(
        f"question: {query} answer:",
        max_new_tokens=150,
        do_sample=True,          # needed for temperature/top_p to take effect
        temperature=1.0,
        top_p=0.9,
        repetition_penalty=1.1,
        num_beams=1,
    )[0]["generated_text"]
    return out

# Build the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 Bayedger FAQ Chatbot")
    txt = gr.Textbox(label="Ask me anything", placeholder="Type your question here…")
    out = gr.Textbox(label="Answer")
    txt.submit(respond, txt, out)

demo.launch()
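
# --- Optional sanity check (a minimal sketch, not part of the original script) ---
# Assumes `gradio`, `transformers`, and a backend such as `torch` are installed,
# e.g. `pip install gradio transformers torch`. The sample question below is
# hypothetical; uncomment the print line (before `demo.launch()`) to verify the
# pipeline loads and produces text:
#
#     print(respond("What services does Bayedger offer?"))
#
# Calling `demo.launch(share=True)` instead would also expose a temporary public URL.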