ombhojane commited on
Commit
00e0c7b
·
verified ·
1 Parent(s): 1f96af4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +142 -142
app.py CHANGED
@@ -1,142 +1,142 @@
1
- import streamlit as st
2
- import google.generativeai as genai
3
- from langchain.document_loaders import PyPDFDirectoryLoader
4
- import os
5
- import shutil
6
-
7
- # Configuration
8
- GOOGLE_API_KEY = "your_api_key_here" # Replace with your actual API key
9
-
10
- # Page configuration
11
- st.set_page_config(page_title="Chat with PDFs", page_icon="📚")
12
-
13
- def initialize_session_state():
14
- """Initialize session state variables"""
15
- session_state_vars = {
16
- "messages": [],
17
- "loaded_files": False,
18
- "pdf_content": None,
19
- "chat": None
20
- }
21
-
22
- for var, value in session_state_vars.items():
23
- if var not in st.session_state:
24
- st.session_state[var] = value
25
-
26
- def load_pdfs(pdf_folder):
27
- """Load PDFs and return their content"""
28
- if not os.path.exists(pdf_folder):
29
- os.makedirs(pdf_folder)
30
-
31
- loader = PyPDFDirectoryLoader(pdf_folder)
32
- documents = loader.load()
33
-
34
- # Concatenate all documents content
35
- content = "\n\n".join([doc.page_content for doc in documents])
36
- return content
37
-
38
- def initialize_chat(pdf_content):
39
- """Initialize Gemini chat with PDF content"""
40
- genai.configure(api_key="[REDACTED — hard-coded API key removed from page; key should be revoked and rotated]")
41
-
42
- generation_config = {
43
- "temperature": 0.7,
44
- "top_p": 0.95,
45
- "top_k": 40,
46
- "max_output_tokens": 8192,
47
- }
48
-
49
- model = genai.GenerativeModel(
50
- model_name="gemini-1.5-pro",
51
- generation_config=generation_config,
52
- )
53
-
54
- # Start chat with context
55
- context_prompt = f"""You are a helpful assistant that answers questions based on the following document content:
56
-
57
- {pdf_content}
58
-
59
- Please use this content to answer user questions. If the answer cannot be found in the content, say so."""
60
-
61
- chat = model.start_chat(history=[])
62
- # Send initial context
63
- chat.send_message(context_prompt)
64
- return chat
65
-
66
- def main():
67
- initialize_session_state()
68
-
69
- st.title("💬 Chat with PDFs")
70
-
71
- # Sidebar for PDF upload
72
- with st.sidebar:
73
- st.header("Upload Documents")
74
- uploaded_files = st.file_uploader(
75
- "Upload your PDFs",
76
- type=["pdf"],
77
- accept_multiple_files=True
78
- )
79
-
80
- if uploaded_files and not st.session_state.loaded_files:
81
- # Create pdfs directory if it doesn't exist
82
- if not os.path.exists("pdfs"):
83
- os.makedirs("pdfs")
84
-
85
- # Clean up old PDF files
86
- for file in os.listdir("pdfs"):
87
- os.remove(os.path.join("pdfs", file))
88
-
89
- # Save uploaded files
90
- for file in uploaded_files:
91
- with open(f"pdfs/{file.name}", "wb") as f:
92
- f.write(file.getvalue())
93
-
94
- # Load PDF content
95
- with st.spinner("Processing PDFs..."):
96
- try:
97
- pdf_content = load_pdfs("pdfs")
98
- st.session_state.pdf_content = pdf_content
99
- st.session_state.loaded_files = True
100
-
101
- # Initialize chat with content
102
- st.session_state.chat = initialize_chat(pdf_content)
103
- except Exception as e:
104
- st.error(f"Error processing PDFs: {str(e)}")
105
- return
106
-
107
- # Main chat interface
108
- if st.session_state.loaded_files:
109
- # Display chat messages
110
- for message in st.session_state.messages:
111
- with st.chat_message(message["role"]):
112
- st.markdown(message["content"])
113
-
114
- # Chat input
115
- if prompt := st.chat_input("Ask a question about your PDFs:"):
116
- # Add user message to chat history
117
- st.session_state.messages.append({"role": "user", "content": prompt})
118
- with st.chat_message("user"):
119
- st.markdown(prompt)
120
-
121
- with st.chat_message("assistant"):
122
- response_placeholder = st.empty()
123
- try:
124
- # Get response from Gemini
125
- if not st.session_state.chat:
126
- st.session_state.chat = initialize_chat(st.session_state.pdf_content)
127
-
128
- response = st.session_state.chat.send_message(prompt)
129
- response_text = response.text
130
-
131
- response_placeholder.markdown(response_text)
132
-
133
- # Add assistant response to chat history
134
- st.session_state.messages.append({"role": "assistant", "content": response_text})
135
- except Exception as e:
136
- response_placeholder.error(f"Error generating response: {str(e)}")
137
-
138
- else:
139
- st.info("Please upload PDFs to start chatting.")
140
-
141
- if __name__ == "__main__":
142
- main()
 
1
+ import streamlit as st
2
+ import google.generativeai as genai
3
+ from langchain.document_loaders import PyPDFDirectoryLoader
4
+ import os
5
+ import shutil
6
+
7
# Configuration — the Gemini API key is read from Streamlit secrets
# (.streamlit/secrets.toml) so it is never hard-coded in the source.
GOOGLE_API_KEY = st.secrets["GOOGLE_API_KEY"]

# Page configuration (set_page_config must be the first Streamlit call)
st.set_page_config(page_title="Chat with PDFs", page_icon="📚")
12
+
13
def initialize_session_state():
    """Seed ``st.session_state`` with the keys this app relies on.

    Keys that already exist are left untouched, so values survive
    Streamlit reruns.
    """
    defaults = (
        ("messages", []),        # chat transcript: {"role", "content"} dicts
        ("loaded_files", False), # True once uploaded PDFs have been processed
        ("pdf_content", None),   # concatenated text of all loaded PDFs
        ("chat", None),          # live Gemini chat session, if any
    )
    for key, default in defaults:
        if key in st.session_state:
            continue
        st.session_state[key] = default
25
+
26
def load_pdfs(pdf_folder):
    """Load every PDF in *pdf_folder* and return their concatenated text.

    Args:
        pdf_folder: Directory scanned for PDF files; created if missing.

    Returns:
        str: Page contents of all loaded documents joined by blank lines;
        empty string when the folder contains no PDFs.
    """
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs
    os.makedirs(pdf_folder, exist_ok=True)

    loader = PyPDFDirectoryLoader(pdf_folder)
    documents = loader.load()

    # Concatenate all documents' content into a single context string
    return "\n\n".join(doc.page_content for doc in documents)
37
+
38
def initialize_chat(pdf_content):
    """Create a Gemini chat session primed with the PDF text.

    The document text is sent as the first message, so subsequent
    questions are answered against that context.
    """
    genai.configure(api_key=GOOGLE_API_KEY)

    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro",
        generation_config={
            "temperature": 0.7,
            "top_p": 0.95,
            "top_k": 40,
            "max_output_tokens": 8192,
        },
    )

    context_prompt = f"""You are a helpful assistant that answers questions based on the following document content:

{pdf_content}

Please use this content to answer user questions. If the answer cannot be found in the content, say so."""

    # Start an empty chat, then send the document as the opening message.
    chat = model.start_chat(history=[])
    chat.send_message(context_prompt)
    return chat
65
+
66
def _save_uploaded_pdfs(uploaded_files, pdf_folder="pdfs"):
    """Replace the contents of *pdf_folder* with the uploaded PDF files."""
    # exist_ok avoids the check-then-create race
    os.makedirs(pdf_folder, exist_ok=True)

    # Drop PDFs from any previous upload so stale content is not loaded.
    for name in os.listdir(pdf_folder):
        os.remove(os.path.join(pdf_folder, name))

    for file in uploaded_files:
        # basename guards against path traversal via a crafted upload name
        target = os.path.join(pdf_folder, os.path.basename(file.name))
        with open(target, "wb") as f:
            f.write(file.getvalue())


def main():
    """Streamlit entry point: upload PDFs, then chat about them with Gemini."""
    initialize_session_state()

    st.title("💬 Chat with PDFs")

    # Sidebar for PDF upload
    with st.sidebar:
        st.header("Upload Documents")
        uploaded_files = st.file_uploader(
            "Upload your PDFs",
            type=["pdf"],
            accept_multiple_files=True
        )

        # Process uploads only once; loaded_files latches after success.
        if uploaded_files and not st.session_state.loaded_files:
            _save_uploaded_pdfs(uploaded_files)

            # Load PDF content and prime the chat session
            with st.spinner("Processing PDFs..."):
                try:
                    pdf_content = load_pdfs("pdfs")
                    st.session_state.pdf_content = pdf_content
                    st.session_state.loaded_files = True

                    # Initialize chat with content
                    st.session_state.chat = initialize_chat(pdf_content)
                except Exception as e:
                    st.error(f"Error processing PDFs: {str(e)}")
                    return

    # Main chat interface
    if st.session_state.loaded_files:
        # Replay the conversation so far
        for message in st.session_state.messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])

        # Chat input
        if prompt := st.chat_input("Ask a question about your PDFs:"):
            # Add user message to chat history
            st.session_state.messages.append({"role": "user", "content": prompt})
            with st.chat_message("user"):
                st.markdown(prompt)

            with st.chat_message("assistant"):
                response_placeholder = st.empty()
                try:
                    # Re-create the chat lazily if it was lost (e.g. rerun)
                    if not st.session_state.chat:
                        st.session_state.chat = initialize_chat(st.session_state.pdf_content)

                    response = st.session_state.chat.send_message(prompt)
                    response_text = response.text

                    response_placeholder.markdown(response_text)

                    # Add assistant response to chat history
                    st.session_state.messages.append({"role": "assistant", "content": response_text})
                except Exception as e:
                    response_placeholder.error(f"Error generating response: {str(e)}")

    else:
        st.info("Please upload PDFs to start chatting.")

if __name__ == "__main__":
    main()