"""Gradio chat interface backed by the Falcon-7B-Instruct model on HuggingFace Hub."""

import gradio as gr
from langchain import HuggingFaceHub, LLMChain, PromptTemplate
from langchain.memory import ConversationBufferMemory

# Hosted instruction-tuned model used for every completion.
repo_id = "tiiuae/falcon-7b-instruct"

# Prompt skeleton; {chat_history} is injected by the chain's memory,
# {human_input} by the caller.
template = """You are a chatbot having a conversation with a human.

{chat_history}
Human: {human_input}
Chatbot:"""

prompt = PromptTemplate(
    template=template,
    input_variables=["chat_history", "human_input"],
)


def generate_response(question, huggingfacehub_api_token, temperature=0.6, max_new_tokens=500):
    """Return the model's reply to *question*, or a friendly error message.

    Args:
        question: The human's message to answer.
        huggingfacehub_api_token: HuggingFace Hub API token used to call the
            hosted inference endpoint.
        temperature: Sampling temperature forwarded to the model.
        max_new_tokens: Generation length cap forwarded to the model.

    Returns:
        The model's text response, or a generic apology string if any error
        occurred (the underlying exception is logged to stdout).
    """
    try:
        # Each request gets fresh, empty memory: the Interface is stateless,
        # so the conversation always starts from scratch.
        memory = ConversationBufferMemory(memory_key="chat_history")
        llm = HuggingFaceHub(
            huggingfacehub_api_token=huggingfacehub_api_token,
            repo_id=repo_id,
            model_kwargs={"temperature": temperature, "max_new_tokens": max_new_tokens},
        )
        llm_chain = LLMChain(prompt=prompt, llm=llm, memory=memory)
        # BUG FIX: do NOT pass chat_history explicitly — the attached memory
        # already supplies that variable, and LangChain raises a ValueError
        # when an input arrives from both memory and the caller.
        response = llm_chain.predict(human_input=question)
    except Exception as e:
        # Top-level UI boundary: network/auth/API failures from the hosted
        # endpoint are not limited to ValueError, so catch broadly, surface a
        # friendly message, and log the detail for the operator.
        response = "An error occurred while processing your request. Please try again later."
        print(f"Error: {str(e)}")
    return response


# Gradio 3/4-style components (the legacy gr.inputs/gr.outputs namespace and
# the `default=` kwarg were removed from current Gradio releases).
inputs = [
    gr.Textbox(label="Question"),
    gr.Textbox(label="HuggingFace API Token", type="password"),
    gr.Slider(minimum=0.1, maximum=2.0, value=0.6, label="Temperature"),
    gr.Slider(minimum=100, maximum=1000, value=500, label="Max New Tokens"),
]
outputs = gr.HTML(label="Response")

title = "Chatbot Interface [BETA]"
description = "Provide a question and get helpful answers from the AI assistant. Create your huggingface account and get your API token from https://huggingface.co/settings/token."
theme = gr.themes.Base(primary_hue="blue", secondary_hue="blue")

# Example rows must supply one value per input component, so the token,
# temperature, and token-count columns are filled with defaults.
examples = [
    ["write a poem on Iron Man", "", 0.6, 500],
    ["What are the benefits of using Python?", "", 0.6, 500],
]

iface = gr.Interface(
    fn=generate_response,
    inputs=inputs,
    outputs=outputs,
    theme=theme,
    title=title,
    description=description,
    allow_flagging="never",
    examples=examples,
    css="footer {visibility: hidden}",
)

iface.launch(debug=True, show_api=False)