feministmystique commited on
Commit
d8b0266
·
verified ·
1 Parent(s): cf77265

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +19 -15
src/streamlit_app.py CHANGED
@@ -1,6 +1,6 @@
1
  import os
2
  os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
3
- from langchain_huggingface import ChatHuggingFace
4
  import streamlit as st
5
 
6
  # constants
@@ -19,10 +19,12 @@ if "response" not in st.session_state:
19
  st.session_state.response = ""
20
 
21
  def get_llm(model_id=MODEL, max_new_tokens=300, temperature=0.7):
22
- return ChatHuggingFace(
 
 
23
  repo_id=model_id,
24
- temperature=temperature,
25
  max_new_tokens=max_new_tokens,
 
26
  )
27
 
28
  # create llm
@@ -54,6 +56,11 @@ p_application = """
54
  6. Do not include any explanation or examples in your response.
55
  """
56
 
 
 
 
 
 
57
  # Initialize session state
58
  if "help_clicks" not in st.session_state:
59
  st.session_state.help_clicks = 0
@@ -149,19 +156,16 @@ with st.container():
149
  if st.session_state.button_clicked:
150
  with st.container():
151
  st.info(st.session_state.response)
152
-
153
- if st.session_state.button_clicked:
154
- with st.container():
155
- st.info(st.session_state.response)
156
 
157
- if st.session_state.button_clicked == "Explain the question":
158
- if st.button("I don't understand. Try again."):
159
- st.session_state.retry_count += 1
160
- alt_llm = get_llm(temperature=0.9)
161
- st.session_state.response = alt_llm.invoke(
162
- st.session_state.full_prompt
163
- )
164
-
 
165
  # Optional: Add footer or spacing
166
  st.markdown("<br><br>", unsafe_allow_html=True)
167
 
 
1
  import os
2
  os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
3
+ from langchain_huggingface import HuggingFaceEndpoint
4
  import streamlit as st
5
 
6
  # constants
 
19
  st.session_state.response = ""
20
 
21
def get_llm(model_id=MODEL, max_new_tokens=300, temperature=0.7):
    """Build a HuggingFaceEndpoint chat LLM client.

    Args:
        model_id: Hugging Face repo id of the model (defaults to MODEL,
            the module-level constant).
        max_new_tokens: upper bound on tokens generated per call.
        temperature: sampling temperature (callers pass 0.9 for the
            "try again" retry path).

    Returns:
        A configured ``HuggingFaceEndpoint`` instance.
    """
    # os.getenv("HF_TOKEN") returns None when the variable is unset, and
    # assigning None into os.environ raises TypeError — so only re-export
    # the token when it is actually present. (HuggingFaceEndpoint reads
    # HF_TOKEN from the environment on its own when it exists.)
    token = os.getenv("HF_TOKEN")
    if token is not None:
        os.environ["HF_TOKEN"] = token

    return HuggingFaceEndpoint(
        repo_id=model_id,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
    )
29
 
30
  # create llm
 
56
  6. Do not include any explanation or examples in your response.
57
  """
58
 
59
+ # count how many times the "I don't understand. Try again." button is clicked
60
+ if "retry_count" not in st.session_state:
61
+ st.session_state.retry_count = 0
62
+
63
+
64
  # Initialize session state
65
  if "help_clicks" not in st.session_state:
66
  st.session_state.help_clicks = 0
 
156
  if st.session_state.button_clicked:
157
  with st.container():
158
  st.info(st.session_state.response)
 
 
 
 
159
 
160
+ if st.session_state.button_clicked == "Explain the question":
161
+ col1, col2, col3 = st.columns([1, 1, 1])
162
+ with col2:
163
+ if st.button("I don't understand. Try again.", key="retry_button"):
164
+ st.session_state.retry_count += 1
165
+ alt_llm = get_llm(temperature=0.9)
166
+ st.session_state.response = alt_llm.invoke(
167
+ st.session_state.full_prompt
168
+ )
169
  # Optional: Add footer or spacing
170
  st.markdown("<br><br>", unsafe_allow_html=True)
171