muhammadahmedrayyan
committed on
Commit
•
e035f8f
1
Parent(s):
05b3276
Update app.py
Browse files
app.py
CHANGED
@@ -92,7 +92,8 @@ st.markdown(
|
|
92 |
with st.sidebar:
|
93 |
new_chat_button = st.button("New Chat", key="new_chat", help="Start a new chat to ask a different question.")
|
94 |
if new_chat_button:
|
95 |
-
st.
|
|
|
96 |
|
97 |
selected = option_menu(
|
98 |
menu_title=None,
|
@@ -144,8 +145,9 @@ with col2:
|
|
144 |
with st.spinner("Generating response..."):
|
145 |
try:
|
146 |
if model_selection == "Disease Analysis":
|
|
|
147 |
pipe = create_pipeline("harishussain12/Disease_Managment")
|
148 |
-
|
149 |
context = ""
|
150 |
if uploaded_file is not None:
|
151 |
file_content = read_pdf(uploaded_file)
|
@@ -155,7 +157,7 @@ with col2:
|
|
155 |
context = file_content
|
156 |
|
157 |
query_input = search_input + (f"\n\nContext:\n{context}" if context else "")
|
158 |
-
st.write("Debug: Query Input
|
159 |
|
160 |
response = pipe(query_input, max_length=200, num_return_sequences=1)
|
161 |
st.markdown(f"### Response:\n{response[0]['generated_text']}")
|
|
|
92 |
with st.sidebar:
|
93 |
new_chat_button = st.button("New Chat", key="new_chat", help="Start a new chat to ask a different question.")
|
94 |
if new_chat_button:
|
95 |
+
st.session_state.clear() # Clear session state to simulate a new chat
|
96 |
+
st.experimental_set_query_params() # Clear URL query params
|
97 |
|
98 |
selected = option_menu(
|
99 |
menu_title=None,
|
|
|
145 |
with st.spinner("Generating response..."):
|
146 |
try:
|
147 |
if model_selection == "Disease Analysis":
|
148 |
+
# Adjusted model name to match working configurations
|
149 |
pipe = create_pipeline("harishussain12/Disease_Managment")
|
150 |
+
|
151 |
context = ""
|
152 |
if uploaded_file is not None:
|
153 |
file_content = read_pdf(uploaded_file)
|
|
|
157 |
context = file_content
|
158 |
|
159 |
query_input = search_input + (f"\n\nContext:\n{context}" if context else "")
|
160 |
+
st.write(f"Debug: Query Input - {query_input}") # Debug log for troubleshooting
|
161 |
|
162 |
response = pipe(query_input, max_length=200, num_return_sequences=1)
|
163 |
st.markdown(f"### Response:\n{response[0]['generated_text']}")
|