Update app.py
Browse files
app.py
CHANGED
@@ -17,7 +17,7 @@ class AdvancedPdfChatbot:
|
|
17 |
os.environ["OPENAI_API_KEY"] = openai_api_key
|
18 |
self.embeddings = OpenAIEmbeddings()
|
19 |
self.text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
|
20 |
-
self.llm = ChatOpenAI(temperature=0, model_name='gpt-4')
|
21 |
self.refinement_llm = ChatOpenAI(temperature=0, model_name='gpt-3.5-turbo')
|
22 |
|
23 |
self.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
@@ -52,6 +52,9 @@ class AdvancedPdfChatbot:
|
|
52 |
self.setup_conversation_chain()
|
53 |
|
54 |
def setup_conversation_chain(self):
|
|
|
|
|
|
|
55 |
refinement_chain = LLMChain(
|
56 |
llm=self.refinement_llm,
|
57 |
prompt=self.refinement_prompt,
|
@@ -82,9 +85,9 @@ class AdvancedPdfChatbot:
|
|
82 |
def _call(self, inputs):
|
83 |
query = inputs['query']
|
84 |
chat_history = inputs.get('chat_history', [])
|
85 |
-
refined_query = self.refinement_chain.run(query
|
86 |
-
response = self.qa_chain({"question": refined_query, "chat_history": chat_history})
|
87 |
-
return {"answer": response
|
88 |
|
89 |
def chat(self, query):
|
90 |
if not self.overall_chain:
|
@@ -105,14 +108,22 @@ pdf_chatbot = AdvancedPdfChatbot(openai_api_key)
|
|
105 |
def upload_pdf(pdf_file):
|
106 |
if pdf_file is None:
|
107 |
return "Please upload a PDF file."
|
108 |
-
file_path = pdf_file.name
|
109 |
-
|
110 |
-
|
|
|
|
|
|
|
111 |
|
112 |
def respond(message, history):
|
113 |
-
|
114 |
-
|
115 |
-
|
|
|
|
|
|
|
|
|
|
|
116 |
|
117 |
def clear_chatbot():
|
118 |
pdf_chatbot.memory.clear()
|
@@ -131,15 +142,17 @@ with gr.Blocks() as demo:
|
|
131 |
|
132 |
upload_status = gr.Textbox(label="Upload Status")
|
133 |
upload_button.click(upload_pdf, inputs=[pdf_upload], outputs=[upload_status])
|
|
|
134 |
path_button = gr.Button("Get PDF Path")
|
135 |
pdf_path_display = gr.Textbox(label="Current PDF Path")
|
|
|
136 |
chatbot_interface = gr.Chatbot()
|
137 |
-
msg = gr.Textbox()
|
138 |
-
clear = gr.Button("Clear")
|
139 |
|
140 |
msg.submit(respond, inputs=[msg, chatbot_interface], outputs=[msg, chatbot_interface])
|
141 |
clear.click(clear_chatbot, outputs=[chatbot_interface])
|
142 |
path_button.click(get_pdf_path, outputs=[pdf_path_display])
|
143 |
|
144 |
if __name__ == "__main__":
|
145 |
-
demo.launch()
|
|
|
17 |
os.environ["OPENAI_API_KEY"] = openai_api_key
|
18 |
self.embeddings = OpenAIEmbeddings()
|
19 |
self.text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
|
20 |
+
self.llm = ChatOpenAI(temperature=0, model_name='gpt-4')
|
21 |
self.refinement_llm = ChatOpenAI(temperature=0, model_name='gpt-3.5-turbo')
|
22 |
|
23 |
self.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
|
|
52 |
self.setup_conversation_chain()
|
53 |
|
54 |
def setup_conversation_chain(self):
|
55 |
+
if not self.db:
|
56 |
+
raise ValueError("Database not initialized. Please upload a PDF first.")
|
57 |
+
|
58 |
refinement_chain = LLMChain(
|
59 |
llm=self.refinement_llm,
|
60 |
prompt=self.refinement_prompt,
|
|
|
85 |
def _call(self, inputs):
|
86 |
query = inputs['query']
|
87 |
chat_history = inputs.get('chat_history', [])
|
88 |
+
refined_query = self.refinement_chain.run({'query': query, 'chat_history': chat_history})
|
89 |
+
response = self.qa_chain.run({"question": refined_query, "chat_history": chat_history})
|
90 |
+
return {"answer": response}
|
91 |
|
92 |
def chat(self, query):
|
93 |
if not self.overall_chain:
|
|
|
108 |
def upload_pdf(pdf_file):
    """Gradio handler: index the uploaded PDF and report the outcome.

    Accepts either a tempfile-like object (with a .name path) or a plain
    path string, as Gradio may supply either depending on configuration.
    Always returns a human-readable status string for the status textbox.
    """
    if pdf_file is None:
        return "Please upload a PDF file."
    # Tempfile-like uploads carry the path in .name; strings are the path.
    path = getattr(pdf_file, 'name', pdf_file)
    try:
        pdf_chatbot.load_and_process_pdf(path)
        return f"PDF processed successfully: {path}"
    except Exception as e:
        # Best-effort handler: surface the failure in the UI, never raise.
        return f"Error processing PDF: {str(e)}"
|
117 |
|
118 |
def respond(message, history):
    """Gradio handler: answer `message` and append the exchange to history.

    Returns the (textbox_value, history) pair Gradio expects: the textbox
    is cleared on success, and chat errors are surfaced in it instead of
    crashing the UI.
    """
    # Guard clause: ignore empty submissions, leaving history untouched.
    if not message:
        return "", history
    try:
        reply = pdf_chatbot.chat(message)
        history.append((message, reply))
    except Exception as e:
        # Best-effort handler: show the error where the reply would go.
        return f"Error: {str(e)}", history
    return "", history
|
127 |
|
128 |
def clear_chatbot():
|
129 |
pdf_chatbot.memory.clear()
|
|
|
142 |
|
143 |
upload_status = gr.Textbox(label="Upload Status")
|
144 |
upload_button.click(upload_pdf, inputs=[pdf_upload], outputs=[upload_status])
|
145 |
+
|
146 |
path_button = gr.Button("Get PDF Path")
|
147 |
pdf_path_display = gr.Textbox(label="Current PDF Path")
|
148 |
+
|
149 |
chatbot_interface = gr.Chatbot()
|
150 |
+
msg = gr.Textbox(placeholder="Type your question here...")
|
151 |
+
clear = gr.Button("Clear Chat")
|
152 |
|
153 |
msg.submit(respond, inputs=[msg, chatbot_interface], outputs=[msg, chatbot_interface])
|
154 |
clear.click(clear_chatbot, outputs=[chatbot_interface])
|
155 |
path_button.click(get_pdf_path, outputs=[pdf_path_display])
|
156 |
|
157 |
# Launch the Gradio app only when run as a script, not when imported.
if __name__ == "__main__":
    demo.launch()
|