Shankarm08 committed
Commit 9b01440 · verified · 1 Parent(s): 69dc379

Update app.py

Files changed (1):
  1. app.py (+24 -27)
app.py CHANGED
@@ -2,46 +2,43 @@
 #Once you have Streamlit installed, you can import it into your Python script using the import statement,
 
 import streamlit as st
+from langchain import HuggingFaceHub  # Correct import for Hugging Face
 
-from langchain_openai import OpenAI
-from langchain_huggingface import HuggingFaceEndpoint
+# Set your Hugging Face API token
+HUGGINGFACE_API_TOKEN = "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"
 
-
-
-#import os
-
-#os.environ["HUGGINGFACEHUB_API_TOKEN"] = "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"
-
-#Function to return the response
+# Function to return the response from Hugging Face model
 def load_answer(question):
-    llm = OpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0)
-    # llm = HuggingFaceEndpoint(
-    #     repo_id="mistralai/Mistral-7B-Instruct-v0.3")  # Model link: https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.3
-
-    answer = llm.invoke(question)
+    # Initialize the Hugging Face model
+    llm = HuggingFaceHub(
+        repo_id="mistralai/Mistral-7B-Instruct-v0.3",    # Specify the Hugging Face model
+        huggingfacehub_api_token=HUGGINGFACE_API_TOKEN,  # Pass your API token
+        model_kwargs={"temperature": 0}                  # Optional: control response randomness
+    )
+
+    # Call the model with the user's question and get the response
+    answer = llm(question)
     return answer
 
-
-#App UI starts here
+# Streamlit App UI starts here
 st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
 st.header("LangChain Demo")
 
-#Gets the user input
+# Function to get user input
 def get_text():
     input_text = st.text_input("You: ", key="input")
     return input_text
 
+# Get user input
+user_input = get_text()
 
-user_input = get_text()
-if user_input != "":
-    response = load_answer(user_input)
-
-submit = st.button('Generate')
-
-#If generate button is clicked
-if submit:
+# Create a button for generating the response
+submit = st.button('Generate')
 
+# If generate button is clicked and user input is not empty
+if submit and user_input:
+    response = load_answer(user_input)
     st.subheader("Answer:")
-
     st.write(response)
-
+elif submit:
+    st.warning("Please enter a question.")  # Warning for empty input
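
A note for anyone applying this change on a recent LangChain release: the HuggingFaceHub wrapper imported from the top-level langchain package has been deprecated there, and the HuggingFaceEndpoint class that the removed lines referenced covers the same use case. The sketch below is an untested adaptation of load_answer() under that assumption; HF_TOKEN is a placeholder name introduced here (not part of the commit), and the temperature value is an assumption, since the hosted inference API typically rejects a value of exactly 0.

    # Hedged adaptation, not part of the commit: load_answer() via langchain_huggingface.
    from langchain_huggingface import HuggingFaceEndpoint  # same import the removed code used

    HF_TOKEN = "your-hugging-face-token"  # placeholder; supply your own token

    def load_answer(question):
        # HuggingFaceEndpoint calls the hosted inference endpoint for the chosen repo
        llm = HuggingFaceEndpoint(
            repo_id="mistralai/Mistral-7B-Instruct-v0.3",
            huggingfacehub_api_token=HF_TOKEN,
            temperature=0.1,  # assumption: the hosted API requires temperature > 0
        )
        # invoke() is the runnable-style call the removed code already used
        return llm.invoke(question)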