NEXAS committed
Commit 8a06a9e · verified · 1 Parent(s): 7a013c2

Update app.py

Files changed (1)
  1. app.py +15 -32
app.py CHANGED
@@ -1,11 +1,12 @@
 import streamlit as st
 import os
+import json
 from utils.ingestion import DocumentProcessor
 from utils.llm import LLMProcessor
 from utils.qa import QAEngine
 
-# Set up Streamlit page
-st.set_page_config(page_title="AI-Powered Document Chat", layout="wide")
+st.set_page_config(page_title="AI-Powered Document QA", layout="wide")
+st.title("📄 AI-Powered Document QA")
 
 # Initialize processors
 document_processor = DocumentProcessor()
@@ -15,7 +16,6 @@ qa_engine = QAEngine()
 # Ensure temp directory exists
 os.makedirs("temp", exist_ok=True)
 
-# Sidebar - File Upload
 st.sidebar.header("Upload a PDF")
 uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])
 
@@ -27,40 +27,23 @@ if uploaded_file:
 
     st.sidebar.success("✅ File uploaded successfully!")
 
-    with st.spinner(""):
+    with st.spinner("🔄 Processing document..."):
         document_processor.process_document(pdf_path)
 
     st.sidebar.success("✅ Document processed successfully!")
 
-# Initialize chat history in session state
-if "chat_history" not in st.session_state:
-    st.session_state.chat_history = []
+    # Query input
+    question = st.text_input("Ask a question from the document:", placeholder="What are the key insights?")
 
-# Display chat history
-st.title("💬 AI-Powered Document Chat")
+    if st.button("🔍 Search & Answer"):
+        if question:
+            with st.spinner("🧠 Searching for relevant context..."):
+                answer = qa_engine.query(question)
 
-chat_container = st.container()
-
-with chat_container:
-    for message in st.session_state.chat_history:
-        role, text = message
-        if role == "user":
-            st.markdown(f"**🧑‍💻 You:** {text}")
+            st.subheader("📝 Answer:")
+            st.write(answer)
         else:
-            st.markdown(f"**🤖 AI:** {text}")
-
-# User Input at the bottom
-question = st.text_input("Ask a question:", placeholder="Type your question and press Enter...", key="user_input")
-
-if question:
-    # Append user question to history
-    st.session_state.chat_history.append(("user", question))
-
-    with st.spinner(""):
-        answer = qa_engine.query(question)
-
-    # Append AI answer to history
-    st.session_state.chat_history.append(("ai", answer))
+            st.warning("⚠️ Please enter a question.")
 
-# Rerun the app to update chat history
-st.rerun()
+st.markdown("---")
+st.caption("🤖 Powered by ChromaDB + Groq LLM | Built with ❤️ using Streamlit")
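Note: the new button flow hands the question straight to `qa_engine.query(question)`, but `utils/qa.py` is not part of this diff. The following is a minimal sketch only, assuming (per the footer caption) ChromaDB for retrieval and Groq's chat API for generation; the collection name, persistence path, prompt, and model name are illustrative guesses, not the repository's actual code.

```python
# Hypothetical sketch of utils/qa.py -- not shown in this commit.
import os

import chromadb
from groq import Groq


class QAEngine:
    """Answer questions by retrieving chunks from ChromaDB and prompting a Groq-hosted LLM."""

    def __init__(self, collection_name: str = "documents") -> None:
        # Local persistent vector store; assumed to be populated by
        # DocumentProcessor.process_document() during ingestion.
        self.chroma = chromadb.PersistentClient(path="chroma_db")
        self.collection = self.chroma.get_or_create_collection(collection_name)
        self.llm = Groq(api_key=os.environ["GROQ_API_KEY"])

    def query(self, question: str, k: int = 4) -> str:
        # Fetch the k chunks most similar to the question.
        hits = self.collection.query(query_texts=[question], n_results=k)
        context = "\n\n".join(hits["documents"][0]) if hits["documents"] else ""

        # Ask the model to answer strictly from the retrieved context.
        response = self.llm.chat.completions.create(
            model="llama-3.1-8b-instant",  # placeholder model name
            messages=[
                {"role": "system", "content": "Answer using only the provided context."},
                {"role": "user", "content": f"Context:\n{context}\n\nQuestion: {question}"},
            ],
        )
        return response.choices[0].message.content
```

With the utils modules in place, the updated app is started the usual way with `streamlit run app.py`.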