feministmystique committed · verified
Commit: 10dac13
1 Parent(s): 3fce44a

Update src/streamlit_app.py

Files changed (1):
  1. src/streamlit_app.py +63 -46
src/streamlit_app.py CHANGED
@@ -1,4 +1,5 @@
 import os
+os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
 from langchain_huggingface import HuggingFaceEndpoint
 import streamlit as st
 
@@ -18,23 +19,24 @@ if "response" not in st.session_state:
     st.session_state.response = ""
 
 def get_llm(model_id=MODEL, max_new_tokens=300, temperature=0.7):
-    os.environ["HF_TOKEN"] = hf_token  # ensure token is accessible to endpoint
+    os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")  # Optional but ensures it's set
+
     return HuggingFaceEndpoint(
         repo_id=model_id,
         max_new_tokens=max_new_tokens,
         temperature=temperature,
     )
-
+
 # create llm
 llm = get_llm()
 
-# prompts
+# prompts
 prompt = f"""
 You are an AI assistant designed to support high school students in the subject of {SUBJECT}.
 Your role is to offer friendly, helpful, concise, in-depth guidance, just like a supportive teacher would.
 
 Please follow these guidelines:
-
+
 1. Maintain a polite, respectful, and professional tone at all times.
 2. Adhere to ethical principles — do not promote cheating, harmful behavior, or misinformation.
 3. Interact in a warm, encouraging, and student-centered style — use clear explanations, positive reinforcement, and examples when needed.
@@ -54,10 +56,11 @@ p_application = """
 6. Do not include any explanation or examples in your response.
 """
 
-# count the number of times "I don't know" is clicked
+# count the number of times "I don't know is clicked"
 if "retry_count" not in st.session_state:
     st.session_state.retry_count = 0
 
+
 # Initialize session state
 if "help_clicks" not in st.session_state:
     st.session_state.help_clicks = 0
@@ -72,8 +75,9 @@ st.markdown(
     f"<p style='font-size:20px;'>{QUESTION}</p>",
     unsafe_allow_html=True
 )
-
+# Outer container for neat padding
 with st.container():
+    # Question area
     st.text_area(
         label="Type your response here.",
         value="",
@@ -83,74 +87,87 @@ with st.container():
 
     st.markdown("")
 
+# Help Button Logic
 def toggle_help():
     st.session_state.help_clicks += 1
-    st.session_state.button_clicked = None
+    st.session_state.button_clicked = None  # Reset help text on new toggle
 
+# Help Button (main toggle)
 col1, col2, col3 = st.columns([1, 3, 1])
 with col2:
     st.button("Help", on_click=toggle_help)
 
+# Show 3 sub-buttons if Help clicked an odd number of times
 if st.session_state.help_clicks % 2 == 1:
     st.markdown("### Need Help?")
     st.markdown("Choose an option below to better understand the question.")
     with st.container():
-        st.markdown("---")
+        st.markdown("---")  # Divider for clarity
         col1, col2, col3 = st.columns(3)
 
         with col1:
            if st.button("📝 Explain the question"):
                if st.session_state.button_clicked != "Explain the question":
-                    full_prompt = f"[INST]<<SYS>>\n{prompt + p_explanation}\n<</SYS>>\n\n{QUESTION}\n[/INST]"
+                    # First time clicked
+                    full_prompt = (
+                        "[INST]<<SYS>>\n"
+                        f"{prompt + p_explanation}\n"
+                        "<</SYS>>\n\n"
+                        f"{QUESTION}\n"
+                        "[/INST]"
+                    )
                    st.session_state.response = llm.invoke(full_prompt)
                    st.session_state.retry_count = 0
-                    st.session_state.full_prompt = full_prompt
+                    st.session_state.full_prompt = full_prompt  # Save prompt for retry
                    st.session_state.button_clicked = "Explain the question"
        with col2:
-            if st.button("💡 Give an example"):
-                if st.session_state.button_clicked != "Give an example":
-                    full_prompt = f"[INST]<<SYS>>\n{prompt + p_example}\n<</SYS>>\n\n{QUESTION}\n[/INST]"
-                    st.session_state.response = llm.invoke(full_prompt)
-                    st.session_state.retry_count = 0
-                    st.session_state.full_prompt = full_prompt
-                    st.session_state.button_clicked = "Give an example"
+            if st.button("💡 Give an example"):
+                if st.session_state.button_clicked != "Give an example":
+                    # First time clicked
+                    full_prompt = (
+                        "[INST]<<SYS>>\n"
+                        f"{prompt + p_example}\n"
+                        "<</SYS>>\n\n"
+                        f"{QUESTION}\n"
+                        "[/INST]"
+                    )
+                    st.session_state.response = llm.invoke(full_prompt)
+                    st.session_state.retry_count = 0
+                    st.session_state.full_prompt = full_prompt  # Save prompt for retry
+                    st.session_state.button_clicked = "Give an example"
        with col3:
            if st.button("🤔 Who cares?"):
-                if st.session_state.button_clicked != "Who cares?":
-                    full_prompt = f"[INST]<<SYS>>\n{prompt + p_application}\n<</SYS>>\n\n{QUESTION}\n[/INST]"
-                    st.session_state.response = llm.invoke(full_prompt)
-                    st.session_state.retry_count = 0
-                    st.session_state.full_prompt = full_prompt
-                    st.session_state.button_clicked = "Who cares?"
+                if st.session_state.button_clicked != "Who cares?":
+                    # First time clicked
+                    full_prompt = (
+                        "[INST]<<SYS>>\n"
+                        f"{prompt + p_application}\n"
+                        "<</SYS>>\n\n"
+                        f"{QUESTION}\n"
+                        "[/INST]"
+                    )
+                    st.session_state.response = llm.invoke(full_prompt)
+                    st.session_state.retry_count = 0
+                    st.session_state.full_prompt = full_prompt  # Save prompt for retry
+                    st.session_state.button_clicked = "Who cares?"
        st.markdown("---")
 
+# Display response text if a sub-button is clicked
 if st.session_state.button_clicked:
    with st.container():
        st.info(st.session_state.response)
-
-    # if st.session_state.button_clicked == "Explain the question":
-    #     col1, col2, col3 = st.columns([1, 1, 1])
-    #     with col2:
-    #         st.markdown(
-    #             """
-    #             <style>
-    #             div.stButton > button {
-    #                 width: 250px!important;
-    #             }
-    #             </style>
-    #             """,
-    #             unsafe_allow_html=True,
-    #         )
-    #         if st.button("I don't understand. Try again.", key="retry_button"):
-    #             st.session_state.retry_count += 1
-    #             alt_llm = get_llm(temperature=0.9)
-    #             st.session_state.response = alt_llm.invoke(st.session_state.full_prompt)
-    #             st.info(st.session_state.response)
-
-# Footer spacing
+
+        if st.session_state.button_clicked == "Explain the question":
+            if st.button("I don't understand. Try again."):
+                st.session_state.retry_count += 1
+                alt_llm = get_llm(temperature=0.9)
+                st.session_state.response = alt_llm.invoke(
+                    st.session_state.full_prompt
+                )
+# Optional: Add footer or spacing
 st.markdown("<br><br>", unsafe_allow_html=True)
 
-# Global button styling
+# css
 st.markdown(
    """
    <style>
@@ -165,4 +182,4 @@ st.markdown(
    </style>
    """,
    unsafe_allow_html=True,
-)
+)