Update app.py
app.py CHANGED
@@ -18,17 +18,12 @@ class AdvancedPdfChatbot:
         self.llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo")
         self.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
         self.qa_chain = None
-        self.db = None
 
     def load_and_process_pdf(self, pdf_path):
         loader = PyPDFLoader(pdf_path)
         documents = loader.load()
         texts = self.text_splitter.split_documents(documents)
-
-        self.db = FAISS.from_documents(texts, self.embeddings)
-        else:
-            new_db = FAISS.from_documents(texts, self.embeddings)
-            self.db.merge_from(new_db)
+        self.db = FAISS.from_documents(texts, self.embeddings)
         self.setup_conversation_chain()
 
     def setup_conversation_chain(self):
@@ -51,9 +46,7 @@ def upload_pdf(pdf_file):
     if pdf_file is None:
         return "Please upload a PDF file."
     file_path = pdf_file.name
-    pdf_file.save(file_path)  # Ensure the file is saved locally
     pdf_chatbot.load_and_process_pdf(file_path)
-    os.remove(file_path)  # Clean up the uploaded file after processing
     return "PDF uploaded and processed successfully. You can now start chatting!"
 
 def respond(message, history):
@@ -61,10 +54,14 @@ def respond(message, history):
     history.append((message, bot_message))
     return "", history
 
+def clear_chatbot():
+    pdf_chatbot.memory.clear()
+    return []
+
 # Create the Gradio interface
 with gr.Blocks() as demo:
     gr.Markdown("# PDF Chatbot")
-
+
     with gr.Row():
         pdf_upload = gr.File(label="Upload PDF", file_types=[".pdf"])
         upload_button = gr.Button("Process PDF")
@@ -77,7 +74,7 @@ with gr.Blocks() as demo:
     clear = gr.Button("Clear")
 
     msg.submit(respond, inputs=[msg, chatbot_interface], outputs=[msg, chatbot_interface])
-    clear.click(
+    clear.click(clear_chatbot, outputs=[chatbot_interface])
 
 if __name__ == "__main__":
     demo.launch()
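For reference, the new Clear wiring follows the usual Gradio pattern: the button's callback resets whatever external state the app keeps and returns an empty list, which clears the gr.Chatbot component. Below is a minimal, self-contained sketch of that pattern, not the commit itself: the chat_memory list and the echo reply stand in for pdf_chatbot.memory and the real QA chain, and the Textbox/Chatbot constructors are assumptions, since only the variable names chatbot_interface, msg, and clear appear in the diff.

import gradio as gr

# Stand-in for pdf_chatbot.memory: any external state the Clear button should reset.
chat_memory = []

def respond(message, history):
    chat_memory.append(message)                    # record the turn in external state
    history.append((message, f"Echo: {message}"))  # placeholder reply instead of the QA chain
    return "", history

def clear_chatbot():
    chat_memory.clear()   # mirrors pdf_chatbot.memory.clear() in the commit
    return []             # an empty history resets the gr.Chatbot display

with gr.Blocks() as demo:
    chatbot_interface = gr.Chatbot()
    msg = gr.Textbox(label="Message")
    clear = gr.Button("Clear")

    msg.submit(respond, inputs=[msg, chatbot_interface], outputs=[msg, chatbot_interface])
    clear.click(clear_chatbot, outputs=[chatbot_interface])

if __name__ == "__main__":
    demo.launch()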