TheBobBob committed on
Commit
3cc7561
·
verified ·
1 Parent(s): 20c9dd9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -18
app.py CHANGED
@@ -178,8 +178,7 @@ def create_vector_db(final_items):
178
  temperature=0.1,
179
  top_p=0.9,
180
  top_k=20,
181
- stream=False,
182
- verbose = True
183
  )
184
 
185
  # Extract the generated summary text
@@ -242,8 +241,7 @@ def generate_response(db, query_text, previous_context):
242
  stream=True, # Enable streaming
243
  temperature=0.1,
244
  top_p=0.9,
245
- top_k=20,
246
- verbose = True
247
  )
248
 
249
  # Use Streamlit to stream the response in real-time
@@ -303,20 +301,19 @@ def streamlit_app():
303
  if db:
304
  st.write("Models have been processed and added to the database.")
305
 
306
- # Move user query input outside of the model search block
307
- user_query = st.text_input("Ask a question about the biomodels:")
 
308
 
309
- # Ensure that query submission and response generation occur properly
310
- if user_query and db:
311
- if 'previous_context' not in st.session_state:
312
- st.session_state.previous_context = ""
313
-
314
- # Stream the response incrementally for the second generation
315
- response = generate_response(db, user_query, st.session_state.previous_context)
316
- st.write(f"Final Response: {response}")
317
-
318
- st.session_state.previous_context += f"{response}\n"
319
 
320
  if __name__ == "__main__":
321
- streamlit_app()
322
-
 
178
  temperature=0.1,
179
  top_p=0.9,
180
  top_k=20,
181
+ stream=False
 
182
  )
183
 
184
  # Extract the generated summary text
 
241
  stream=True, # Enable streaming
242
  temperature=0.1,
243
  top_p=0.9,
244
+ top_k=20
 
245
  )
246
 
247
  # Use Streamlit to stream the response in real-time
 
301
  if db:
302
  st.write("Models have been processed and added to the database.")
303
 
304
+ # Check if the database is created before showing the query input
305
+ if db:
306
+ user_query = st.text_input("Ask a question about the biomodels:")
307
 
308
+ if user_query:
309
+ if 'previous_context' not in st.session_state:
310
+ st.session_state.previous_context = ""
311
+
312
+ # Stream the response incrementally for the second generation
313
+ response = generate_response(db, user_query, st.session_state.previous_context)
314
+ st.write(f"Final Response: {response}")
315
+
316
+ st.session_state.previous_context += f"{response}\n"
 
317
 
318
  if __name__ == "__main__":
319
+ streamlit_app()