Pavan178 committed
Commit bd0ebd6 · verified · 1 Parent(s): 416fe7a

Update app.py

Files changed (1):
  app.py  +11 -5
app.py CHANGED

@@ -7,7 +7,7 @@ from langchain.vectorstores import FAISS
 from langchain.chains import ConversationalRetrievalChain
 from langchain.chat_models import ChatOpenAI
 from langchain.memory import ConversationBufferMemory
-import os
+
 openai_api_key = os.environ.get("OPENAI_API_KEY")
 
 class AdvancedPdfChatbot:
@@ -18,12 +18,17 @@ class AdvancedPdfChatbot:
         self.llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo")
         self.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
         self.qa_chain = None
+        self.db = None
 
     def load_and_process_pdf(self, pdf_path):
         loader = PyPDFLoader(pdf_path)
         documents = loader.load()
         texts = self.text_splitter.split_documents(documents)
-        self.db = FAISS.from_documents(texts, self.embeddings)
+        if self.db is None:
+            self.db = FAISS.from_documents(texts, self.embeddings)
+        else:
+            new_db = FAISS.from_documents(texts, self.embeddings)
+            self.db.merge_from(new_db)
         self.setup_conversation_chain()
 
     def setup_conversation_chain(self):
@@ -40,14 +45,15 @@ class AdvancedPdfChatbot:
         return result['answer']
 
 # Initialize the chatbot
-
 pdf_chatbot = AdvancedPdfChatbot(openai_api_key)
 
 def upload_pdf(pdf_file):
     if pdf_file is None:
         return "Please upload a PDF file."
     file_path = pdf_file.name
+    pdf_file.save(file_path)  # Ensure the file is saved locally
     pdf_chatbot.load_and_process_pdf(file_path)
+    os.remove(file_path)  # Clean up the uploaded file after processing
     return "PDF uploaded and processed successfully. You can now start chatting!"
 
 def respond(message, history):
@@ -58,7 +64,7 @@ def respond(message, history):
 # Create the Gradio interface
 with gr.Blocks() as demo:
     gr.Markdown("# PDF Chatbot")
-
+
     with gr.Row():
         pdf_upload = gr.File(label="Upload PDF", file_types=[".pdf"])
         upload_button = gr.Button("Process PDF")
@@ -74,4 +80,4 @@ with gr.Blocks() as demo:
     clear.click(lambda: None, None, chatbot_interface, queue=False)
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
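
The core change in this commit is incremental indexing: instead of overwriting self.db on every upload, a new FAISS index is built per PDF and folded into the existing one with merge_from. The snippet below is a minimal, self-contained sketch of that pattern using the same langchain.vectorstores.FAISS wrapper; FakeEmbeddings and the sample texts are placeholders chosen only so the sketch runs without an OpenAI API key, not part of the repository.

# Sketch of the incremental-indexing pattern introduced by this commit:
# build one FAISS index per batch of documents and merge later batches into it.
# FakeEmbeddings stands in for OpenAIEmbeddings so no API key is needed.
from langchain.embeddings import FakeEmbeddings
from langchain.vectorstores import FAISS

embeddings = FakeEmbeddings(size=64)  # placeholder embedding model

db = None  # mirrors self.db = None in AdvancedPdfChatbot.__init__

for batch in (["first PDF, page 1", "first PDF, page 2"],
              ["second PDF, page 1"]):
    new_db = FAISS.from_texts(batch, embeddings)
    if db is None:
        db = new_db               # first upload: keep the index as-is
    else:
        db.merge_from(new_db)     # later uploads: fold into the existing index

print(db.index.ntotal)  # 3 vectors: both "PDFs" are now searchable together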
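
The hunks call self.setup_conversation_chain() and read result['answer'], but the method body falls outside the diff context. The sketch below shows a typical ConversationalRetrievalChain wiring under the assumption that it uses the attributes visible above (self.llm, self.memory, self.db); build_qa_chain and the k=4 setting are illustrative names, not the repository's actual implementation.

# Hypothetical wiring for the pieces the diff references but does not show.
from langchain.chains import ConversationalRetrievalChain

def build_qa_chain(llm, memory, db):
    # Retriever over the (possibly merged) FAISS index; k=4 is an illustrative default.
    retriever = db.as_retriever(search_kwargs={"k": 4})
    # With memory attached, the chain keeps chat history itself and its output
    # dict exposes an 'answer' key, matching result['answer'] in the diff.
    return ConversationalRetrievalChain.from_llm(llm=llm, retriever=retriever, memory=memory)

# Example use (requires OPENAI_API_KEY, as in app.py):
# qa_chain = build_qa_chain(pdf_chatbot.llm, pdf_chatbot.memory, pdf_chatbot.db)
# result = qa_chain({"question": "What does the PDF say about X?"})
# print(result["answer"])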