NEXAS committed
Commit a8e2b6e · verified · 1 parent: bd6665e

Update app.py

Files changed (1)
  1. app.py +13 -68
app.py CHANGED
@@ -1,62 +1,12 @@
  import streamlit as st
  import os
- import time
  from utils.ingestion import DocumentProcessor
  from utils.llm import LLMProcessor
  from utils.qa import QAEngine

- # Set up Streamlit page with modern UI
+ # Set up Streamlit page
  st.set_page_config(page_title="AI-Powered Document Chat", layout="wide")

- # CSS for better UI styling
- st.markdown(
-     """
-     <style>
-     .chat-container {
-         max-width: 800px;
-         margin: auto;
-     }
-     .chat-bubble {
-         padding: 12px;
-         border-radius: 12px;
-         margin-bottom: 10px;
-         max-width: 75%;
-     }
-     .user-bubble {
-         background-color: #0078ff;
-         color: white;
-         text-align: right;
-         margin-left: auto;
-     }
-     .ai-bubble {
-         background-color: #f1f1f1;
-         color: black;
-     }
-     .chat-input-container {
-         position: fixed;
-         bottom: 10px;
-         width: 100%;
-         max-width: 800px;
-         background: white;
-         padding: 10px;
-         box-shadow: 0 -2px 10px rgba(0,0,0,0.1);
-         border-radius: 8px;
-     }
-     .chat-input {
-         width: 100%;
-         padding: 10px;
-         border-radius: 8px;
-         border: 1px solid #ccc;
-     }
-     .spinner {
-         text-align: center;
-         padding: 20px;
-     }
-     </style>
-     """,
-     unsafe_allow_html=True
- )
-
  # Initialize processors
  document_processor = DocumentProcessor()
  llm_processor = LLMProcessor()
@@ -66,7 +16,7 @@ qa_engine = QAEngine()
  os.makedirs("temp", exist_ok=True)

  # Sidebar - File Upload
- st.sidebar.header("📂 Upload a PDF")
+ st.sidebar.header("Upload a PDF")
  uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])

  if uploaded_file:
@@ -86,35 +36,30 @@ if uploaded_file:
  if "chat_history" not in st.session_state:
      st.session_state.chat_history = []

- # Chat UI container
+ # Display chat history
  st.title("💬 AI-Powered Document Chat")
+
  chat_container = st.container()

  with chat_container:
-     st.markdown('<div class="chat-container">', unsafe_allow_html=True)
-     for role, text in st.session_state.chat_history:
+     for message in st.session_state.chat_history:
+         role, text = message
          if role == "user":
-             st.markdown(f'<div class="chat-bubble user-bubble">{text}</div>', unsafe_allow_html=True)
+             st.markdown(f"**🧑‍💻 You:** {text}")
          else:
-             st.markdown(f'<div class="chat-bubble ai-bubble">{text}</div>', unsafe_allow_html=True)
-     st.markdown('</div>', unsafe_allow_html=True)
+             st.markdown(f"**🤖 AI:** {text}")

- # User Input at the bottom with fixed position
- question = st.text_input(
-     "Ask a question:",
-     placeholder="Type your question and press Enter...",
-     key="user_input"
- )
+ # User Input at the bottom
+ question = st.text_input("Ask a question:", placeholder="Type your question and press Enter...", key="user_input")

  if question:
-     # Append user question to chat history
+     # Append user question to history
      st.session_state.chat_history.append(("user", question))

-     with st.spinner('<div class="spinner">...</div>'):
-         time.sleep(0.5) # Simulating delay
+     with st.spinner(""):
          answer = qa_engine.query(question)

-     # Append AI answer to chat history
+     # Append AI answer to history
      st.session_state.chat_history.append(("ai", answer))

  # Rerun the app to update chat history
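
For context, the simplified chat flow this commit lands reduces to the pattern below. This is a minimal, self-contained sketch rather than the repository's code: answer_question is a hypothetical stand-in for qa_engine.query (provided by utils.qa.QAEngine in the actual app and assumed to return a plain string), and a guard against re-appending the same question on unrelated reruns is added here for safety. The history is rendered after the input handler so new turns appear in the same run without an explicit rerun.

import streamlit as st

st.set_page_config(page_title="AI-Powered Document Chat", layout="wide")


def answer_question(question: str) -> str:
    # Hypothetical stand-in for qa_engine.query(); the real app delegates to utils.qa.QAEngine.
    return f"You asked: {question}"


# Keep the conversation across Streamlit reruns.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

st.title("💬 AI-Powered Document Chat")

question = st.text_input("Ask a question:", placeholder="Type your question and press Enter...", key="user_input")

# Handle each question only once; text_input values persist across reruns.
if question and st.session_state.get("last_question") != question:
    st.session_state.last_question = question
    st.session_state.chat_history.append(("user", question))
    with st.spinner("Thinking..."):
        st.session_state.chat_history.append(("ai", answer_question(question)))

# Render the history as plain markdown, newest turn last.
for role, text in st.session_state.chat_history:
    prefix = "**🧑‍💻 You:**" if role == "user" else "**🤖 AI:**"
    st.markdown(f"{prefix} {text}")

Saved as a standalone script, the sketch runs with `streamlit run sketch.py` and exercises the same session_state-backed loop the commit switches to, minus the PDF ingestion.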