Spaces:
Sleeping
Sleeping
File size: 1,586 Bytes
9d3800d dc79f72 17ca081 9d3800d 261fc9b 9d3800d 9b01440 9d3800d dc79f72 9d3800d 9b01440 9d3800d 9b01440 9d3800d 9b01440 9d3800d 9b01440 9d3800d 9b01440 9d3800d 9b01440 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 |
import os

import streamlit as st
from dotenv import load_dotenv
from langchain import HuggingFaceHub
# Load environment variables from a local .env file (e.g. the HF token).
load_dotenv()
# Set your Hugging Face API token from the environment variable
HUGGINGFACE_API_TOKEN = os.getenv("HUGGINGFACE_API_TOKEN")  # None if the variable is unset
# Function to return the response from Hugging Face model
def load_answer(question):
    """Query the Hugging Face model with *question* and return its reply.

    Args:
        question: The user's prompt as a plain string.

    Returns:
        The model's answer string, or an ``"Error: ..."`` string if the
        call fails (missing/invalid token, network error, etc.).
    """
    try:
        # Initialize the Hugging Face model using LangChain's HuggingFaceHub class
        llm = HuggingFaceHub(
            repo_id="mistralai/Mistral-7B-Instruct-v0.3",  # Hugging Face model repo
            huggingfacehub_api_token=HUGGINGFACE_API_TOKEN,  # Pass your API token
            model_kwargs={"temperature": 0},  # Optional: Control response randomness
        )
        # BUG FIX: HuggingFaceHub is an LLM, not a Chain — it has no .run()
        # method, so the original `llm.run(question)` always raised
        # AttributeError (silently swallowed by the except below and shown
        # to the user as an error string). Invoke the LLM directly instead.
        answer = llm.invoke(question)
        return answer
    except Exception as e:
        # Capture and return any exceptions or errors so the UI can display
        # them instead of crashing the Streamlit app.
        return f"Error: {str(e)}"
# Streamlit App UI starts here
# NOTE: set_page_config must be the first Streamlit command in the script.
st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
st.header("LangChain Demo")
# Render the chat input box and hand back whatever the user typed.
def get_text():
    """Return the current contents of the "You:" text input widget."""
    return st.text_input("You: ", key="input")
# --- Main interaction flow ---
# Read the user's question and render the submit button.
user_input = get_text()
submit = st.button('Generate')

# Only act once the button has been pressed; an empty question gets a
# warning instead of a model call.
if submit:
    if user_input:
        response = load_answer(user_input)
        st.subheader("Answer:")
        st.write(response)
    else:
        st.warning("Please enter a question.")  # Warning for empty input
|