Update chat_handler.py

chat_handler.py  CHANGED  (+40 -40)
@@ -11,7 +11,7 @@ from pydantic import BaseModel
 import requests

 from prompt_builder import build_intent_prompt, build_parameter_prompt
-from
+from logger import log_info, log_error, log_warning, log_debug
 from api_executor import call_api as execute_api
 from config_provider import ConfigProvider
 from validation_engine import validate

@@ -48,9 +48,9 @@ def _safe_intent_parse(raw: str) -> tuple[str, str]:
     # Remove 'assistant' suffix if exists
     if name.endswith("assistant"):
         name = name[:-9]  # Remove last 9 chars ("assistant")
-
+        log_info(f"🔧 Removed 'assistant' suffix from intent name")
     tail = raw[m.end():]
-
+    log_info(f"🎯 Parsed intent: {name}")
     return name, tail

 # ───────────────────────── LLM SETUP ───────────────────────── #

@@ -61,9 +61,9 @@ def setup_llm_provider():
     try:
         from llm_factory import LLMFactory
         llm_provider = LLMFactory.create_provider()
-
+        log_info("✅ LLM provider initialized successfully")
     except Exception as e:
-
+        log_error("❌ Failed to initialize LLM provider", e)
         raise

 # ───────────────────────── LLM GENERATION ───────────────────────── #

@@ -86,8 +86,8 @@ async def llm_generate(s: Session, prompt: str, user_msg: str) -> str:
         if not version:
             raise ValueError("No published version found")

-
-
+        log_info(f"Calling LLM for session {s.session_id[:8]}...")
+        log_info(f"Prompt preview (first 200 chars): {prompt[:200]}...")

         # Call the configured LLM provider
         raw = await llm_provider.generate(

@@ -96,14 +96,14 @@ async def llm_generate(s: Session, prompt: str, user_msg: str) -> str:
             context=s.chat_history[-10:] if s.chat_history else []
         )

-
+        log_info(f"LLM raw response: {raw[:100]}...")
         return raw

     except requests.exceptions.Timeout:
-
+        log_warning(f"⏱️ LLM timeout for session {s.session_id[:8]}")
         raise HTTPException(status_code=504, detail="LLM request timed out")
     except Exception as e:
-
+        log_error("❌ LLM error", e)
         raise HTTPException(status_code=500, detail=f"LLM error: {str(e)}")

 # ───────────────────────── PARAMETER EXTRACTION ───────────────────────── #

@@ -117,7 +117,7 @@ def _extract_parameters_from_response(raw: str, session: Session, intent_config)
         json_match = re.search(r'\{[^}]+\}', raw)

         if not json_match:
-
+            log_info("❌ No JSON found in response")
             return False

         json_str = json_match.group(1) if '```' in raw else json_match.group(0)

@@ -132,25 +132,25 @@
             )

             if not param_config:
-
+                log_info(f"⚠️ Parameter config not found for: {param_name}")
                 continue

             # Validate parameter
             if validate(str(param_value), param_config):
                 session.variables[param_config.variable_name] = str(param_value)
                 any_valid = True
-
+                log_info(f"✅ Extracted {param_name}={param_value} → {param_config.variable_name}")
             else:
-
+                log_info(f"❌ Invalid {param_name}={param_value}")

         return any_valid

     except json.JSONDecodeError as e:
-
-
+        log_error("❌ JSON parsing error", e)
+        log_error(f"❌ Failed to parse: {raw[:200]}")
         return False
     except Exception as e:
-
+        log_error("❌ Parameter processing error", e)
         return False

 # ───────────────────────── API EXECUTION ───────────────────────── #

@@ -162,17 +162,17 @@ async def _execute_api_call(session: Session, intent_config) -> str:
         api_config = cfg.get_api(api_name)

         if not api_config:
-
+            log_info(f"❌ API config not found: {api_name}")
             session.reset_flow()
             return intent_config.fallback_error_prompt or "İşlem başarısız oldu."

-
-
+        log_info(f"📡 Calling API: {api_name}")
+        log_info(f"📦 API variables: {session.variables}")

         # Execute API call with session
         response = execute_api(api_config, session)
         api_json = response.json()
-
+        log_info(f"✅ API response: {api_json}")

         # Humanize response
         session.state = "humanize"

@@ -189,11 +189,11 @@ async def _execute_api_call(session: Session, intent_config) -> str:
         return f"İşlem tamamlandı: {api_json}"

     except requests.exceptions.Timeout:
-
+        log_warning(f"⏱️ API timeout: {api_name}")
         session.reset_flow()
         return intent_config.fallback_timeout_prompt or "İşlem zaman aşımına uğradı."
     except Exception as e:
-
+        log_error("❌ API call error", e)
         session.reset_flow()
         return intent_config.fallback_error_prompt or "İşlem sırasında bir hata oluştu."

@@ -240,7 +240,7 @@ async def start_session(req: StartRequest):
         if not llm_provider:
             from llm_factory import LLMFactory
             llm_provider = LLMFactory.create_provider()
-
+            log_info(f"🤖 LLM Provider created: {type(llm_provider).__name__}")

         # Create session with version config
         session_id = session_store.create(req.project_name, version)

@@ -249,7 +249,7 @@ async def start_session(req: StartRequest):
         # Welcome prompt'u işle
         greeting = "Hoş geldiniz! Size nasıl yardımcı olabilirim?"
         if version.welcome_prompt:
-
+            log_info(f"Processing welcome prompt for session {session_id[:8]}...")
            try:
                 # Welcome prompt'u LLM'e gönder
                 welcome_result = await llm_provider.generate(

@@ -260,19 +260,19 @@ async def start_session(req: StartRequest):
                 if welcome_result and welcome_result.strip():
                     greeting = welcome_result.strip()
             except Exception as e:
-
+                log_error("⚠️ Welcome prompt processing failed", e)
                 # Fallback to default greeting

         session.add_turn("assistant", greeting)

-
+        log_info(f"✅ Session created for project '{req.project_name}' version {version.no} (highest published)")

         return ChatResponse(session_id=session.session_id, answer=greeting)

     except HTTPException:
         raise
     except Exception as e:
-
+        log_error("❌ Session creation error", e)
         raise HTTPException(500, f"Session creation failed: {str(e)}")

 @router.post("/chat")

@@ -286,7 +286,7 @@ async def chat(req: ChatRequest, x_session_id: str = Header(...)):

         # Add user message to history
         session.add_message("user", req.message)
-
+        log_info(f"💬 User [{session.session_id[:8]}...]: {req.message}")

         # Get project and version config
         project = next((p for p in cfg.projects if p.name == session.project_name), None)

@@ -314,11 +314,11 @@ async def chat(req: ChatRequest, x_session_id: str = Header(...)):
             session.current_intent = intent_name
             session.intent_config = intent_config
             session.state = "collect_params"
-
+            log_info(f"🎯 Intent detected: {intent_name}")

             # Check if parameters were already extracted
             if tail and _extract_parameters_from_response(tail, session, intent_config):
-
+                log_info("📦 Some parameters extracted from initial response")

             # Check what parameters are missing
             missing_params = [

@@ -344,7 +344,7 @@ async def chat(req: ChatRequest, x_session_id: str = Header(...)):
             session.add_message("assistant", clean_question)
             return {"response": clean_question, "intent": intent_name, "state": "collecting_params"}
         else:
-
+            log_info(f"⚠️ Unknown intent: {intent_name}")

         # No intent detected, return general response
         clean_response = _trim_response(raw)

@@ -405,7 +405,7 @@ async def chat(req: ChatRequest, x_session_id: str = Header(...)):
     except HTTPException:
         raise
     except Exception as e:
-
+        log_error("❌ Chat error", e)
         import traceback
         traceback.print_exc()
         raise HTTPException(status_code=500, detail=str(e))

@@ -416,7 +416,7 @@ async def handle_new_message(session: Session, user_input: str) -> str:
         # Get version config from session
         version = session.get_version_config()
         if not version:
-
+            log_info("❌ Version config not found")
             return "Bir hata oluştu. Lütfen tekrar deneyin."

         # Get project config

@@ -432,7 +432,7 @@ async def handle_new_message(session: Session, user_input: str) -> str:

         # Empty response fallback
         if not raw:
-
+            log_info("⚠️ Empty response from LLM")
             return "Üzgünüm, mesajınızı anlayamadım. Lütfen tekrar dener misiniz?"

         # Check for intent

@@ -446,11 +446,11 @@ async def handle_new_message(session: Session, user_input: str) -> str:
             session.current_intent = intent_name
             session.intent_config = intent_config
             session.state = "collect_params"
-
+            log_info(f"🎯 Intent detected: {intent_name}")

             # Check if parameters were already extracted
             if tail and _extract_parameters_from_response(tail, session, intent_config):
-
+                log_info("📦 Some parameters extracted from initial response")

             # Check what parameters are missing
             missing_params = [

@@ -476,14 +476,14 @@ async def handle_new_message(session: Session, user_input: str) -> str:
         return _trim_response(raw)

     except Exception as e:
-
+        log_error("❌ Error in handle_new_message", e)
         return "Bir hata oluştu. Lütfen tekrar deneyin."

 async def handle_parameter_followup(session: Session, user_input: str) -> str:
     """Handle parameter collection followup - for WebSocket"""
     try:
         if not session.intent_config:
-
+            log_info("⚠️ No intent config in session")
             session.reset_flow()
             return "Üzgünüm, hangi işlem için bilgi istediğimi unuttum. Baştan başlayalım."

@@ -533,7 +533,7 @@ async def handle_parameter_followup(session: Session, user_input: str) -> str:
         return _trim_response(param_question)

     except Exception as e:
-
+        log_error("❌ Error in handle_parameter_followup", e)
         session.reset_flow()
         return "Bir hata oluştu. Lütfen tekrar deneyin."
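The new import pulls log_info, log_error, log_warning and log_debug from a project-local logger module that is not shown in this commit. As a rough, hypothetical sketch of what such a module could look like (only the four function names and the optional exception argument to log_error are taken from the diff; the module path, formatter, and levels are assumptions, not the repository's actual implementation), something like this would satisfy the calls added above:

import logging
import sys
from typing import Optional

# Hypothetical stand-in for the logger module imported by chat_handler.py;
# the real implementation is not part of this commit.
_logger = logging.getLogger("chat_handler")
_handler = logging.StreamHandler(sys.stdout)
_handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
_logger.addHandler(_handler)
_logger.setLevel(logging.DEBUG)

def log_debug(message: str) -> None:
    _logger.debug(message)

def log_info(message: str) -> None:
    _logger.info(message)

def log_warning(message: str) -> None:
    _logger.warning(message)

def log_error(message: str, exc: Optional[Exception] = None) -> None:
    # The diff calls both log_error(msg) and log_error(msg, e),
    # so the exception argument is treated as optional here.
    if exc is not None:
        _logger.error("%s: %r", message, exc)
    else:
        _logger.error(message)

Whatever the real module does, its error helper has to accept an optional exception as a second positional argument, since the handler passes the caught exception alongside the message in most except blocks.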