Update app.py
app.py CHANGED
@@ -170,6 +170,28 @@ References: {', '.join(section.get('metadata', {}).get('references', [])) or 'No
             logger.error(f"Error formatting section: {str(e)}")
             return str(section)
 
+    def generate_ai_response(self, context: str, query: str) -> str:
+        """Generate AI interpretation with error handling"""
+        try:
+            chain = self.prompt | self.llm
+            response = chain.invoke({
+                "context": context,
+                "chat_history": self.chat_history,
+                "question": query
+            })
+
+            # Handle different response types
+            if isinstance(response, dict):
+                return response.get('text', str(response))
+            elif isinstance(response, list):
+                return response[0] if response else "No response generated"
+            else:
+                return str(response)
+
+        except Exception as e:
+            logger.error(f"Error generating AI response: {str(e)}")
+            return "I apologize, but I encountered an error while interpreting the legal sections. Please try rephrasing your question."
+
     def search_sections(self, query: str, progress=gr.Progress()) -> Tuple[str, str]:
         self.is_searching = True
         start_time = time.time()
@@ -198,8 +220,8 @@ References: {', '.join(section.get('metadata', {}).get('references', [])) or 'No
 
                 raw_results.append(self.format_section(result))
                 context_parts.append(f"""
-Section {result.get('section_number')}: {result.get('title')}
-{result.get('content', '')}
+Section {result.get('section_number', 'N/A')}: {result.get('title', 'N/A')}
+{result.get('content', 'N/A')}
                 """)
                 progress((0.3 + (idx * 0.1)), desc=f"Processing result {idx + 1} of {len(search_results)}...")
 
@@ -209,13 +231,7 @@ Section {result.get('section_number')}: {result.get('title')}
             progress(0.8, desc="Generating AI interpretation...")
             context = "\n\n".join(context_parts)
 
-
-            ai_response = chain.invoke({
-                "context": context,
-                "chat_history": self.chat_history,
-                "question": query
-            })
-
+            ai_response = self.generate_ai_response(context, query)
             self.chat_history += f"\nUser: {query}\nAI: {ai_response}\n"
 
             elapsed_time = time.time() - start_time
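In short, this commit factors the inline chain.invoke(...) call out of search_sections into a generate_ai_response helper that builds the prompt | llm chain itself, normalizes whatever the chain returns into a plain string, and falls back to an apology message on any exception; it also adds 'N/A' defaults for the section fields interpolated into the context block. As a quick illustration of the normalization branch in isolation, here is a standalone sketch; normalize_llm_response and the sample values are hypothetical stand-ins for this note, not code from app.py:

def normalize_llm_response(response) -> str:
    """Collapse common LangChain return shapes into a plain string.

    Mirrors the branching in the new generate_ai_response: dicts expose
    their 'text' field, lists yield their first element, and anything
    else is converted with str().
    """
    if isinstance(response, dict):
        return response.get('text', str(response))
    if isinstance(response, list):
        return response[0] if response else "No response generated"
    return str(response)


# Illustrative calls with stub values (not real chain output):
print(normalize_llm_response({"text": "Section 420 covers cheating."}))  # dict -> 'text' field
print(normalize_llm_response(["First candidate answer"]))                # list -> first element
print(normalize_llm_response("Plain string answer"))                     # any other type -> str()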