File size: 2,106 Bytes
b1b6964
 
8a06a9e
b1b6964
 
4afa186
b1b6964
8a06a9e
 
bd6665e
b1b6964
 
 
 
 
6e1201a
 
 
3f6512f
 
b1b6964
 
3f6512f
b1b6964
6e1201a
3f6512f
b1b6964
 
 
 
 
8a06a9e
b1b6964
6e1201a
b1b6964
3f6512f
 
 
 
 
 
 
 
 
b1b6964
3f6512f
b1b6964
3f6512f
 
 
 
 
8a06a9e
3f6512f
 
 
 
a8e2b6e
3f6512f
 
 
 
b1b6964
8a06a9e
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
import streamlit as st
import os
import json
from utils.ingestion import DocumentProcessor
from utils.llm import LLMProcessor
from utils.qa import QAEngine

# --- Page setup ---------------------------------------------------------
st.set_page_config(page_title="AI-Powered Document QA", layout="wide")
st.title("πŸ“„ AI-Powered Document QA")

# Staging area for uploaded PDFs (idempotent, created up front).
os.makedirs("temp", exist_ok=True)

# Backend services: PDF ingestion, raw LLM access, and retrieval-based QA.
document_processor, llm_processor, qa_engine = (
    DocumentProcessor(),
    LLMProcessor(),
    QAEngine(),
)

# --- Sidebar: file upload widget ----------------------------------------
st.sidebar.header("πŸ“‚ Upload a PDF")
uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])

# Document upload & processing.
# NOTE: Streamlit re-runs this entire script on every widget interaction,
# so the expensive ingestion step is guarded: a file is only (re)processed
# when its name differs from the last one we ingested this session.
if uploaded_file:
    pdf_path = os.path.join("temp", uploaded_file.name)

    if st.session_state.get("processed_file") != uploaded_file.name:
        # Persist the upload to disk so the processor can read it by path.
        # getbuffer() is position-independent, unlike read(), which returns
        # b"" once the underlying stream has been consumed on a prior rerun.
        with open(pdf_path, "wb") as f:
            f.write(uploaded_file.getbuffer())

        st.sidebar.success("βœ… File uploaded successfully!")

        with st.spinner("πŸ”„ Processing document..."):
            document_processor.process_document(pdf_path)

        st.sidebar.success("βœ… Document processed successfully!")
        # Remember what we ingested so reruns skip the work above.
        st.session_state["processed_file"] = uploaded_file.name

    st.session_state["document_uploaded"] = True
else:
    # No file selected (or it was removed): answers fall back to general
    # LLM knowledge, and any stale "already processed" marker is cleared.
    st.session_state["document_uploaded"] = False
    st.session_state.pop("processed_file", None)

# Divider between sections
st.markdown("---")

# --- Q&A section ---------------------------------------------------------
st.header("πŸ” Ask a Question")

question = st.text_input("Ask a question:", placeholder="What are the key insights?")

if st.button("πŸ’‘ Get Answer"):
    # Reject whitespace-only input, not just the empty string, so a bare
    # spacebar press doesn't send a blank question to the engine.
    if question.strip():
        with st.spinner("🧠 Generating response..."):
            # .get() avoids a KeyError if this section ever runs before the
            # upload section has populated session_state.
            if st.session_state.get("document_uploaded"):
                # Document-grounded answer via the retrieval engine.
                answer = qa_engine.query(question)
            else:
                # No document: fall back to the LLM's general knowledge.
                answer = llm_processor.generate_answer("", question)
                st.warning("⚠️ No document uploaded. This response is generated from general AI knowledge and may not be document-specific.")

        st.subheader("πŸ“ Answer:")
        st.write(answer)
    else:
        st.warning("⚠️ Please enter a question.")

st.markdown("---")
st.caption("πŸ€– Powered by ChromaDB + Groq LLM | Built with ❀️ using Streamlit")