import streamlit as st
import os
import json
from utils.ingestion import DocumentProcessor
from utils.llm import LLMProcessor
from utils.qa import QAEngine

st.set_page_config(page_title="AI-Powered Document QA", layout="wide")
st.title("📄 AI-Powered Document QA")

# Initialize processors
document_processor = DocumentProcessor()
llm_processor = LLMProcessor()
qa_engine = QAEngine()

# Ensure temp directory exists
os.makedirs("temp", exist_ok=True)

st.sidebar.header("Upload a PDF")
uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])

if uploaded_file:
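    # Persist the upload to the temp directory so the processors can read it from disk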
    pdf_path = os.path.join("temp", uploaded_file.name)

    with open(pdf_path, "wb") as f:
        f.write(uploaded_file.read())

    st.sidebar.success("✅ File uploaded successfully!")

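    # Hand the saved PDF to the ingestion pipeline (see utils.ingestion)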
    with st.spinner("🔄 Processing document..."):
        document_processor.process_document(pdf_path)

    st.sidebar.success("✅ Document processed successfully!")

    # Query input
    question = st.text_input("Ask a question from the document:", placeholder="What are the key insights?")

    if st.button("🔍 Search & Answer"):
        if question:
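            # Retrieve relevant context from the index and generate an answer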
            with st.spinner("🧠 Searching for relevant context..."):
                answer = qa_engine.query(question)

            st.subheader("📝 Answer:")
            st.write(answer.content)
        else:
            st.warning("⚠️ Please enter a question.")

st.markdown("---")
st.caption("🤖 Powered by ChromaDB + Groq LLM | Built with ❤️ using Streamlit")