Update kadi_apy_bot.py

kadi_apy_bot.py  CHANGED  (+38 -26)
@@ -10,7 +10,7 @@ class KadiAPYBot:
 
 
 
-    def process_query(self, query):
+    def process_query(self, query, chat_history):
         """
         Process a user query, handle history, retrieve contexts, and generate a response.
         """
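
Note: the diff does not show how chat_history is meant to be structured at the call site. A minimal usage sketch, assuming it is a list of dicts with "query" and "response" keys (the keys read by format_history(), added further down in this commit); the constructor arguments are omitted because they are not part of this diff:

# Hypothetical usage sketch -- not part of the commit.
bot = KadiAPYBot()  # constructor arguments not shown in this diff

chat_history = [
    {"query": "How do I create a record with kadi-apy?",
     "response": "Use the records API of the library."},
]

answer = bot.process_query("How do I then upload a file to it?", chat_history)
print(answer)
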
@@ -132,30 +132,7 @@ class KadiAPYBot:
         context = self.vector_store.similarity_search(query = query, k=k, filter=filter)
         return context
 
-    def
-        formatted_docs = []
-        print("################################# start of doc #######################################")
-        for i, doc in enumerate(documents, start=1):
-            formatted_docs.append(f"Snippet {i}: \n")
-            formatted_docs.append("\n")
-            all_metadata = doc.metadata
-
-            metadata_str = ", ".join(f"{key}: {value}" for key, value in all_metadata.items())
-            print("\n")
-            print("------------------------------Beneath is retrieved doc------------------------------------------------")
-            print(metadata_str)
-            formatted_docs.append(metadata_str)
-            print("\n")
-            formatted_docs.append("\n")
-            formatted_docs.append(doc.page_content)
-            print(doc.page_content)
-            print("\n\n")
-            print("------------------------------End of retrived doc------------------------------------------------")
-            formatted_docs.append("\n\n")
-
-        return formatted_docs
-
-    def generate_response(self, query, doc_context, code_context):
+    def generate_response(self, query, history, doc_context, code_context):
         """
         Generate a response using the retrieved contexts and the LLM.
         """
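
Note: the body of generate_response sits outside the visible hunks, so it is not shown how the new history argument is folded into the prompt; only the tail of the prompt ("Query:" / "{query}") and the final invoke() call appear in the next hunk. A rough sketch under that assumption, with the surrounding prompt wording invented for illustration:

# Hypothetical sketch of the updated method -- the signature, docstring, and
# the last prompt lines come from this diff; everything else is assumed.
def generate_response(self, query, history, doc_context, code_context):
    """
    Generate a response using the retrieved contexts and the LLM.
    """
    prompt = f"""Conversation so far:
{history}

Documentation context:
{doc_context}

Code context:
{code_context}

Query:
{query}
"""
    return self.llm.invoke(prompt).content
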
@@ -185,4 +162,39 @@ class KadiAPYBot:
         Query:
         {query}
         """
-        return self.llm.invoke(prompt).content
+        return self.llm.invoke(prompt).content
+
+
+    def format_documents(self, documents):
+        formatted_docs = []
+        print("################################# start of doc #######################################")
+        for i, doc in enumerate(documents, start=1):
+            formatted_docs.append(f"Snippet {i}: \n")
+            formatted_docs.append("\n")
+            all_metadata = doc.metadata
+
+            metadata_str = ", ".join(f"{key}: {value}" for key, value in all_metadata.items())
+            print("\n")
+            print("------------------------------Beneath is retrieved doc------------------------------------------------")
+            print(metadata_str)
+            formatted_docs.append(metadata_str)
+            print("\n")
+            formatted_docs.append("\n")
+            formatted_docs.append(doc.page_content)
+            print(doc.page_content)
+            print("\n\n")
+            print("------------------------------End of retrived doc------------------------------------------------")
+            formatted_docs.append("\n\n")
+
+        return formatted_docs
+
+    def format_history(conversation_history):
+        formatted_history = []
+        for i, entry in enumerate(conversation_history, start=1):
+            user_query = entry.get("query", "No query provided")
+            assistant_response = entry.get("response", "No response yet")  # Updated label
+            formatted_history.append(f"Turn {i}:")
+            formatted_history.append(f"User Query: {user_query}")
+            formatted_history.append(f"Assistant Response: {assistant_response}")  # Using "Assistant Response"
+            formatted_history.append("\n")
+        return "\n".join(formatted_history)
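
As a quick check of the new format_history helper: assuming it sits inside the class like the other helpers in this hunk, it takes no self parameter, so as written it has to be called through the class (an instance call would pass the instance itself as conversation_history). For a one-turn history it builds the string shown below; the sample entries are illustrative, but the "query"/"response" keys are the ones read via entry.get() above:

# Illustrative call -- history contents are made up for the example.
history = [
    {"query": "What is Kadi4Mat?",
     "response": "A research data management platform."},
]

print(KadiAPYBot.format_history(history))
# Turn 1:
# User Query: What is Kadi4Mat?
# Assistant Response: A research data management platform.
# (each turn is followed by a blank line from the trailing "\n" entry)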