import streamlit as st
from langchain.llms import HuggingFaceHub

# Function to return the response
def generate_answer(query):
    llm = HuggingFaceHub(
        repo_id="HuggingFaceH4/zephyr-7b-alpha",
        # Generation settings passed through to the Hugging Face Inference API
        model_kwargs={"temperature": 0.5, "max_new_tokens": 512},
    )
    prompt = f"""
    You are a helpful doctor assistant that gives patients advice.
    Please answer the patient in a kind and helpful way.
    {query}
    """
    result = llm.predict(prompt)
    return result

# App UI starts here
st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
st.header("LangChain Demo")

# Get user input
user_input = st.text_input("You: ", key="input")
submit = st.button("Generate")

# If the button is clicked, generate and display the answer
if submit:
    st.subheader("Answer:")
    response = generate_answer(user_input)
    st.write(response)
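# Note (a sketch, not part of the app above): the HuggingFaceHub wrapper looks for
# your API token in the HUGGINGFACEHUB_API_TOKEN environment variable, so set it
# before the LLM is created. One minimal way to do that, assuming you paste the
# token directly (the "hf_..." value below is a placeholder):
#
#     import os
#     os.environ.setdefault("HUGGINGFACEHUB_API_TOKEN", "hf_...")
#
# Then launch the app with `streamlit run app.py`, where app.py is whatever name
# you gave the script containing the code above.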