Update app.py
app.py (CHANGED)
@@ -68,8 +68,21 @@ class AdvancedPdfChatbot:
         )
         self.overall_chain = self.CustomChain(refinement_chain=refinement_chain, qa_chain=qa_chain)
 
+    def setup_conversation_chain(self):
+        refinement_chain = LLMChain(
+            llm=self.refinement_llm,
+            prompt=self.refinement_prompt,
+            output_key='refined_query'
+        )
+        qa_chain = ConversationalRetrievalChain.from_llm(
+            self.llm,
+            retriever=self.db.as_retriever(),
+            memory=self.memory,
+            combine_docs_chain_kwargs={"prompt": self.prompt}
+        )
+        self.overall_chain = self.CustomChain(refinement_chain=refinement_chain, qa_chain=qa_chain)
+
     class CustomChain(Chain):
-
         def __init__(self, refinement_chain, qa_chain):
             super().__init__()
             self.refinement_chain = refinement_chain
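
The first hunk hoists `setup_conversation_chain` above the nested `CustomChain` definition and instantiates it as `self.CustomChain(...)` rather than the bare `CustomChain(...)` that the deleted version in the next hunk used. That attribute lookup is the substance of the fix: `CustomChain` is declared inside `AdvancedPdfChatbot`, so it never exists as a module-level name, and a class body is not an enclosing scope for its methods. A minimal, hypothetical illustration (not from app.py):

class Outer:
    class Inner:
        pass

    def make(self):
        # The nested class is only reachable as an attribute of the outer
        # class or an instance; a bare `Inner()` here would raise NameError,
        # because methods do not close over the class body's names.
        return self.Inner()
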
@@ -87,34 +100,13 @@ class AdvancedPdfChatbot:
             query = inputs['query']
             chat_history = inputs.get('chat_history', [])
 
-            # Run the refinement chain to refine the query
             refined_query = self.refinement_chain.run({'query': query, 'chat_history': chat_history})
-
-            # Run the QA chain using the refined query and the chat history
             response = self.qa_chain({'question': refined_query, 'chat_history': chat_history})
 
-            # Return the answer
             return {"answer": response['answer']}
 
 
 
-    def setup_conversation_chain(self):
-        if not self.db:
-            raise ValueError("Database not initialized. Please upload a PDF first.")
-
-        refinement_chain = LLMChain(
-            llm=self.refinement_llm,
-            prompt=self.refinement_prompt,
-            output_key='refined_query'
-        )
-        qa_chain = ConversationalRetrievalChain.from_llm(
-            self.llm,
-            retriever=self.db.as_retriever(),
-            memory=self.memory,
-            combine_docs_chain_kwargs={"prompt": self.prompt}
-        )
-        self.overall_chain = CustomChain(refinement_chain=refinement_chain, qa_chain=qa_chain)
-
 
     def chat(self, query):
         if not self.overall_chain:
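
Taken together, the two hunks leave a single copy of `setup_conversation_chain` (the second hunk deletes the old one, along with its `if not self.db` guard, so the method now assumes a PDF has already been indexed) and route everything through the nested chain class. For reference, here is a self-contained sketch of that composition, assuming the legacy LangChain (pre-0.1) API that `LLMChain`, `ConversationalRetrievalChain.from_llm`, and `Chain` come from. The Pydantic field declarations and the `input_keys`/`output_keys` properties are my assumptions, not code from this commit: `Chain` is a Pydantic model, so a subclass normally declares its sub-chains as fields (plain attribute assignment in `__init__`, as the diff does, can trip `Chain`'s validation) and must report which keys it consumes and produces.

from typing import Any, Dict, List

from langchain.chains import ConversationalRetrievalChain, LLMChain
from langchain.chains.base import Chain


class CustomChain(Chain):
    # Declared as Pydantic fields so Chain's model validation accepts them.
    refinement_chain: LLMChain
    qa_chain: ConversationalRetrievalChain

    @property
    def input_keys(self) -> List[str]:
        return ["query"]

    @property
    def output_keys(self) -> List[str]:
        return ["answer"]

    def _call(self, inputs: Dict[str, Any]) -> Dict[str, str]:
        query = inputs["query"]
        chat_history = inputs.get("chat_history", [])
        # Stage 1: fold the chat history into a standalone, refined query.
        refined_query = self.refinement_chain.run(
            {"query": query, "chat_history": chat_history}
        )
        # Stage 2: answer the refined query with the retrieval QA chain.
        response = self.qa_chain(
            {"question": refined_query, "chat_history": chat_history}
        )
        return {"answer": response["answer"]}

The two-stage layout lets a cheaper refinement model condense the conversation before retrieval runs. One caveat: `ConversationalRetrievalChain` already performs its own question condensing when built with `memory=`, so supplying both a memory and an explicit `chat_history`, as the diff does, can overlap.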