TheBobBob committed on
Commit
85c5e97
·
verified ·
1 Parent(s): e6ee09e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -7
app.py CHANGED
@@ -298,10 +298,11 @@ def streamlit_app(db):
298
 
299
  final_items.extend(items)
300
 
301
- db = create_vector_db(final_items) # Renamed 'db' to avoid overwriting
302
 
303
  st.write("Models have been processed and added to the database.")
304
-
 
305
  @st.cache_resource
306
  def get_messages(db):
307
  if "messages" not in st.session_state:
@@ -310,20 +311,26 @@ def streamlit_app(db):
310
 
311
  st.session_state.messages = get_messages(db)
312
 
 
313
  for message in st.session_state.messages:
314
  with st.chat_message(message["role"]):
315
  st.markdown(message["content"])
316
 
317
- if prompt := st.chat_input(query_text):
 
 
318
  st.chat_message("user").markdown(prompt)
319
  st.session_state.messages.append({"role": "user", "content": prompt})
320
- response = generate_response(db, query_text, st.session_state)
321
-
 
 
 
322
  with st.chat_message("assistant"):
323
  st.markdown(response)
324
 
 
325
  st.session_state.messages.append({"role": "assistant", "content": response})
326
-
327
-
328
  if __name__ == "__main__":
329
  streamlit_app(db)
 
298
 
299
  final_items.extend(items)
300
 
301
+ db = create_vector_db(final_items) # Create or update the database with final items
302
 
303
  st.write("Models have been processed and added to the database.")
304
+
305
+ # Cache the chat messages
306
  @st.cache_resource
307
  def get_messages(db):
308
  if "messages" not in st.session_state:
 
311
 
312
  st.session_state.messages = get_messages(db)
313
 
314
+ # Display chat history
315
  for message in st.session_state.messages:
316
  with st.chat_message(message["role"]):
317
  st.markdown(message["content"])
318
 
319
+ # Chat input will act as the query input for the model
320
+ if prompt := st.chat_input("Ask a question about the models:"):
321
+ # Add user input to chat
322
  st.chat_message("user").markdown(prompt)
323
  st.session_state.messages.append({"role": "user", "content": prompt})
324
+
325
+ # Generate the response from the model
326
+ response = generate_response(db, prompt, st.session_state.messages) # Pass the prompt to generate_response
327
+
328
+ # Display assistant response
329
  with st.chat_message("assistant"):
330
  st.markdown(response)
331
 
332
+ # Add the assistant response to the chat history
333
  st.session_state.messages.append({"role": "assistant", "content": response})
334
+
 
335
  if __name__ == "__main__":
336
  streamlit_app(db)