feministmystique committed on
Commit f18f917 · verified · 1 Parent(s): cdb473b

Update src/streamlit_app.py

Files changed (1)
  1. src/streamlit_app.py +10 -1
src/streamlit_app.py CHANGED
@@ -7,6 +7,11 @@ QUESTION = "Compute the integral of f(x) = x^2."
 MODEL = "mistralai/Mistral-7B-Instruct-v0.3"
 hf_token = os.getenv("HF_TOKEN")
 
+# Check if HF token is set
+if not hf_token:
+    st.error("HF_TOKEN is not set. Please add it to your HuggingFace secrets.")
+    st.stop()
+
 # remembers session
 if "response" not in st.session_state:
     st.session_state.response = ""
@@ -27,7 +32,8 @@ def get_llm(model_id=MODEL, max_new_tokens=130, temperature=0.7):
     llm = HuggingFaceEndpoint(
         repo_id=model_id,
         max_new_tokens=max_new_tokens,
-        temperature=temperature
+        temperature=temperature,
+        token=hf_token
     )
     return llm
 
@@ -82,6 +88,9 @@ with st.container():
        if st.button("📝 Explain the question"):
            prompt = f"[INST]You are a thoughtful AI assistant.\nUser: {QUESTION} [/INST]\nAI:"
            st.session_state.response = llm.invoke(prompt)
+
+           print("Model Response:", st.session_state.response)
+
 
            st.session_state.button_clicked = "Explain the question"
    with col2:
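
For context, the pieces this commit touches can be read together as the sketch below. The hunk headers show that MODEL, hf_token, and get_llm already exist in the file; the import lines (file lines 1-6) are not part of this diff, so the ones shown here are assumptions about what the unchanged top of src/streamlit_app.py contains.

# Minimal sketch of the patched flow, assuming the usual imports for a
# Streamlit + langchain_huggingface app (the real lines 1-6 of
# src/streamlit_app.py are outside this diff and may differ).
import os

import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint

MODEL = "mistralai/Mistral-7B-Instruct-v0.3"
hf_token = os.getenv("HF_TOKEN")

# New in this commit: fail fast with a visible error in the UI instead of
# letting the endpoint call fail later with an authentication error.
if not hf_token:
    st.error("HF_TOKEN is not set. Please add it to your HuggingFace secrets.")
    st.stop()

def get_llm(model_id=MODEL, max_new_tokens=130, temperature=0.7):
    # New in this commit: the token is passed to the endpoint constructor
    # (the patch uses the keyword `token`), so requests are authenticated
    # explicitly rather than relying on the environment being picked up.
    llm = HuggingFaceEndpoint(
        repo_id=model_id,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        token=hf_token,
    )
    return llm

The print("Model Response:", ...) added after llm.invoke(prompt) writes the raw model response to stdout, i.e. the Space's container logs, which presumably helps debug cases where nothing shows up in the UI.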