Update app.py
app.py
CHANGED
@@ -24,6 +24,7 @@ class AdvancedPdfChatbot:
         self.llm = ChatOpenAI(temperature=0.5,model_name='gpt-4o',max_tokens=3000)
         self.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
         self.qa_chain = None
+        self.pdf_path = None
         self.template = """
         You are a file-based knowledge assistant that interacts with users like ChatGPT. Your primary source of knowledge comes from user-uploaded files, such as PDFs. You do not rely on general knowledge or the internet. Instead, you extract, analyze, and synthesize information directly from the content of the provided file(s).
         **1. Personality and Tone**
@@ -81,6 +82,7 @@ NOTE : DESCRIBE/SUMMARY should always return the overall summary of the document
             texts = self.text_splitter.split_documents(documents)
             self.db = FAISS.from_documents(texts, self.embeddings)
             self.setup_conversation_chain()
+            self.pdf_path = pdf_path
         except Exception as e:
             return f"An error occurred while processing the PDF: {e}"
 
@@ -98,6 +100,13 @@ NOTE : DESCRIBE/SUMMARY should always return the overall summary of the document
         result = self.qa_chain({"question": query})
         return result['answer']
 
+    def get_pdf_path(self):
+        # Return the stored PDF path
+        if self.pdf_path:
+            return self.pdf_path
+        else:
+            return "No PDF uploaded yet."
+
 # Initialize the chatbot
 pdf_chatbot = AdvancedPdfChatbot(openai_api_key)
 
@@ -123,6 +132,10 @@ def clear_chatbot():
     pdf_chatbot.memory.clear()
     return []
 
+def get_pdf_path():
+    # Call the chatbot method to return the current PDF path
+    return pdf_chatbot.get_pdf_path()
+
 # Create the Gradio interface
 with gr.Blocks() as demo:
     gr.Markdown("# PDF Chatbot")
@@ -134,6 +147,9 @@ with gr.Blocks() as demo:
     upload_status = gr.Textbox(label="Upload Status")
     upload_button.click(upload_pdf, inputs=[pdf_upload], outputs=[upload_status])
 
+    path_button = gr.Button("Get PDF Path")
+    pdf_path_display = gr.Textbox(label="Current PDF Path")
+
     chatbot_interface = gr.Chatbot()
     msg = gr.Textbox()
     clear = gr.Button("Clear")
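Note: the hunks above add the "Get PDF Path" button and the "Current PDF Path" textbox plus the get_pdf_path() helper, but no click handler connecting them appears in the changed lines. Assuming that wiring is not done elsewhere in app.py, a minimal Gradio sketch of how the pieces could be connected is shown below; the helper here is a hypothetical stand-in for the one added in this commit, and the widget names are taken from the diff.

import gradio as gr

# Hypothetical stand-in for the module-level get_pdf_path() added in this commit.
def get_pdf_path():
    return "No PDF uploaded yet."

with gr.Blocks() as demo:
    path_button = gr.Button("Get PDF Path")
    pdf_path_display = gr.Textbox(label="Current PDF Path")

    # No inputs are needed; the helper's return value is written to the textbox.
    path_button.click(fn=get_pdf_path, inputs=None, outputs=pdf_path_display)

Without a call like path_button.click(...), pressing the new button would have no visible effect in the interface.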