# rag-chat/pages/2_Chat.py
import streamlit as st
from app import chat_with_ai
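# Note: `chat_with_ai` is assumed (based on its usage below) to take the user's
# question and return a `(response, sources)` tuple, where `sources` is a list of
# dicts with 'doc_name', 'chunk_index', and 'text' keys describing the retrieved
# PDF chunks.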
st.title("Chat with PDFs")
# Initialize session state for sources if it doesn't exist
if 'sources' not in st.session_state:
    st.session_state.sources = None
# Create two columns for chat and sources
col1, col2 = st.columns([2, 1])
with col1:
    st.header("Chat")
    user_input = st.text_input("Ask a question about the uploaded content:")
    if st.button("Send"):
        if user_input:
            print(f"Sending user input: {user_input}")
            # Show a progress bar while waiting for the model's response
            st.session_state.chat_progress = st.progress(0)
            response, sources = chat_with_ai(user_input)
            st.session_state.chat_progress.progress(1.0)
            st.markdown("**You:** " + user_input)
            st.markdown("**AI:** " + response)
            # Store sources in session state for display in col2
            st.session_state.sources = sources
            # Remove the progress bar once the response is displayed
            st.session_state.chat_progress.empty()
            del st.session_state.chat_progress
        else:
            print("Empty user input")
            st.warning("Please enter a question.")
with col2:
    st.header("Source Chunks")
    if st.session_state.sources:
        for i, source in enumerate(st.session_state.sources, 1):
            with st.expander(f"Source {i} - {source['doc_name']}"):
                st.markdown(f"**Chunk Index:** {source['chunk_index']}")
                st.text(source['text'])
    else:
        st.info("Ask a question to see source chunks here.")