Update src/streamlit_app.py
src/streamlit_app.py  (CHANGED, +6 -3)
@@ -2,6 +2,7 @@ import os
 os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
 from langchain_huggingface import HuggingFaceEndpoint
 import streamlit as st
+from langchain_core.messages import HumanMessage

 # constants
 QUESTION = "Compute the integral of f(x) = x^2."
@@ -116,7 +117,7 @@ with st.container():
 f"{QUESTION}\n"
 "[/INST]"
 )
-st.session_state.response = llm.invoke(full_prompt)
+st.session_state.response = llm.invoke([HumanMessage(content=full_prompt)])
 st.session_state.retry_count = 0
 st.session_state.full_prompt = full_prompt  # Save prompt for retry
 st.session_state.button_clicked = "Explain the question"
@@ -131,7 +132,7 @@ with st.container():
 f"{QUESTION}\n"
 "[/INST]"
 )
-st.session_state.response = llm.invoke(full_prompt)
+st.session_state.response = llm.invoke([HumanMessage(content=full_prompt)])
 st.session_state.retry_count = 0
 st.session_state.full_prompt = full_prompt  # Save prompt for retry
 st.session_state.button_clicked = "Give an example"
@@ -146,7 +147,7 @@ with st.container():
 f"{QUESTION}\n"
 "[/INST]"
 )
-st.session_state.response = llm.invoke(full_prompt)
+st.session_state.response = llm.invoke([HumanMessage(content=full_prompt)])
 st.session_state.retry_count = 0
 st.session_state.full_prompt = full_prompt  # Save prompt for retry
 st.session_state.button_clicked = "Who cares?"
@@ -166,6 +167,8 @@ with st.container():
 st.session_state.response = alt_llm.invoke(
 st.session_state.full_prompt
 )
+st.info(st.session_state.response)
+
 # Optional: Add footer or spacing
 st.markdown("<br><br>", unsafe_allow_html=True)
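For reference, here is a minimal, self-contained sketch of the call pattern these changes adopt: the prompt string is wrapped in a HumanMessage before being passed to invoke, and the stored response is surfaced with st.info. The endpoint configuration (repo_id, generation parameters), the single-button wiring, and the simplified prompt below are illustrative placeholders, not values taken from the app.

import os

os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"

import streamlit as st
from langchain_core.messages import HumanMessage
from langchain_huggingface import HuggingFaceEndpoint

QUESTION = "Compute the integral of f(x) = x^2."

# Placeholder endpoint; the app's actual model id and parameters are not shown in
# this diff. Expects a Hugging Face token in the environment (e.g. HF_TOKEN).
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.2",
    max_new_tokens=512,
    temperature=0.1,
)

# Simplified stand-in for the app's [INST] ... [/INST] prompt assembly.
full_prompt = f"[INST]\n{QUESTION}\n[/INST]"

if st.button("Explain the question"):
    # Wrap the raw prompt in a HumanMessage, as the updated invoke calls do.
    st.session_state.response = llm.invoke([HumanMessage(content=full_prompt)])
    st.session_state.full_prompt = full_prompt  # kept around for the retry path
    st.session_state.retry_count = 0

if "response" in st.session_state:
    st.info(st.session_state.response)

LangChain language models accept either a plain string or a sequence of messages as input, so wrapping the prompt in a HumanMessage keeps these calls working unchanged if llm is later swapped for a chat model such as ChatHuggingFace; that is presumably the motivation for the change.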