ciyidogan committed on
Commit 9aa9170 · verified
1 Parent(s): 80b3562

Delete chat_handler.py

Files changed (1):
  chat_handler.py +0 -720
chat_handler.py DELETED
@@ -1,720 +0,0 @@
- """
- Flare – Chat Handler (Refactored with LLM Factory)
- ==========================================
- """
-
- import re, json, sys, httpx, os
- from datetime import datetime
- from typing import Dict, List, Optional, Any
- from fastapi import APIRouter, HTTPException, Header
- from pydantic import BaseModel
- import requests
-
- from prompt_builder import build_intent_prompt, build_parameter_prompt
- from logger import log_info, log_error, log_warning, log_debug
- from api_executor import call_api as execute_api
- from config_provider import ConfigProvider
- from validation_engine import validate
- from session import session_store, Session
-
- # Initialize router
- router = APIRouter()
-
- # ───────────────────────── GLOBAL VARS ───────────────────────── #
- cfg = ConfigProvider.get()
- llm_provider = None
-
- # ───────────────────────── HELPERS ───────────────────────── #
- def _trim_response(raw: str) -> str:
-     """
-     Remove everything after the first logical assistant block or intent tag.
-     Also strips trailing 'assistant' artifacts and prompt injections.
-     """
-     # Stop at our own rules if model leaked them
-     for stop in ["#DETECTED_INTENT", "⚠️", "\nassistant", "assistant\n", "assistant"]:
-         idx = raw.find(stop)
-         if idx != -1:
-             raw = raw[:idx]
-     # Normalise the greeting
-     raw = re.sub(r"Hoş[\s-]?geldin(iz)?", "Hoş geldiniz", raw, flags=re.IGNORECASE)
-     return raw.strip()
-
- def _safe_intent_parse(raw: str) -> tuple[str, str]:
-     """Extract intent name and extra tail."""
-     m = re.search(r"#DETECTED_INTENT:\s*([A-Za-z0-9_-]+)", raw)
-     if not m:
-         return "", raw
-     name = m.group(1)
-     # Remove 'assistant' suffix if exists
-     if name.endswith("assistant"):
-         name = name[:-9]  # Remove last 9 chars ("assistant")
-         log_info(f"🔧 Removed 'assistant' suffix from intent name")
-     tail = raw[m.end():]
-     log_info(f"🎯 Parsed intent: {name}")
-     return name, tail
-
- # ───────────────────────── LLM SETUP ───────────────────────── #
- def setup_llm_provider():
-     """Initialize LLM provider using factory pattern"""
-     global llm_provider
-
-     try:
-         from llm_factory import LLMFactory
-         llm_provider = LLMFactory.create_provider()
-         log_info("✅ LLM provider initialized successfully")
-     except Exception as e:
-         log_error("❌ Failed to initialize LLM provider", e)
-         raise
-
- # ───────────────────────── LLM GENERATION ───────────────────────── #
- async def llm_generate(s: Session, prompt: str, user_msg: str) -> str:
-     """Call LLM provider with proper error handling"""
-     global llm_provider
-
-     if llm_provider is None:
-         setup_llm_provider()
-
-     try:
-         # Get version config from session
-         version = s.get_version_config()
-         if not version:
-             # Fallback: get from project config
-             project = next((p for p in cfg.projects if p.name == s.project_name), None)
-             if not project:
-                 raise ValueError(f"Project not found: {s.project_name}")
-             version = next((v for v in project.versions if v.published), None)
-             if not version:
-                 raise ValueError("No published version found")
-
-         log_info(f"🚀 Calling LLM for session {s.session_id[:8]}...")
-         log_info(f"📋 Prompt preview (first 200 chars): {prompt[:200]}...")
-
-         history = s.chat_history
-
-         # Call the configured LLM provider
-         raw = await llm_provider.generate(
-             user_input=user_msg,
-             system_prompt=prompt,
-             context=history[-10:] if history else []
-         )
-
-         log_info(f"🪄 LLM raw response: {raw[:100]}...")
-         return raw
-
-     except requests.exceptions.Timeout:
-         log_warning(f"⏱️ LLM timeout for session {s.session_id[:8]}")
-         raise HTTPException(status_code=504, detail="LLM request timed out")
-     except Exception as e:
-         log_error("❌ LLM error", e)
-         raise HTTPException(status_code=500, detail=f"LLM error: {str(e)}")
-
- # ───────────────────────── PARAMETER EXTRACTION ───────────────────────── #
- def _extract_parameters_from_response(raw: str, session: Session, intent_config) -> bool:
-     """Extract parameters from the LLM response"""
-     try:
-         # Look for JSON block in response
-         json_match = re.search(r'```json\s*(.*?)\s*```', raw, re.DOTALL)
-         if not json_match:
-             # Try to find JSON without code block
-             json_match = re.search(r'\{[^}]+\}', raw)
-
-         if not json_match:
-             log_info("❌ No JSON found in response")
-             return False
-
-         json_str = json_match.group(1) if '```' in raw else json_match.group(0)
-         params = json.loads(json_str)
-
-         any_valid = False
-         for param_name, param_value in params.items():
-             # Find parameter config
-             param_config = next(
-                 (p for p in intent_config.parameters if p.name == param_name),
-                 None
-             )
-
-             if not param_config:
-                 log_info(f"⚠️ Parameter config not found for: {param_name}")
-                 continue
-
-             # Validate parameter
-             if validate(str(param_value), param_config):
-                 session.variables[param_config.variable_name] = str(param_value)
-                 any_valid = True
-                 log_info(f"✅ Extracted {param_name}={param_value} → {param_config.variable_name}")
-             else:
-                 log_info(f"❌ Invalid {param_name}={param_value}")
-
-         return any_valid
-
-     except json.JSONDecodeError as e:
-         log_error("❌ JSON parsing error", e)
-         log_error(f"❌ Failed to parse: {raw[:200]}")
-         return False
-     except Exception as e:
-         log_error("❌ Parameter processing error", e)
-         return False
-
- # ───────────────────────── API EXECUTION ───────────────────────── #
- async def _execute_api_call(session: Session, intent_config) -> str:
-     """Execute API call and return humanized response with better error handling"""
-     try:
-         session.state = "call_api"
-         api_name = intent_config.action
-         api_config = cfg.get_api(api_name)
-
-         if not api_config:
-             log_info(f"❌ API config not found: {api_name}")
-             session.reset_flow()
-             return get_user_friendly_error("api_error", {"api_name": api_name})
-
-         log_info(f"📡 Calling API: {api_name}")
-         log_info(f"📦 API variables: {session.variables}")
-
-         # Execute API call with session
-         response = execute_api(api_config, session)
-         api_json = response.json()
-         log_info(f"✅ API response: {api_json}")
-
-         # Humanize response
-         session.state = "humanize"
-         if api_config.response_prompt:
-             prompt = api_config.response_prompt.replace(
-                 "{{api_response}}",
-                 json.dumps(api_json, ensure_ascii=False)
-             )
-             human_response = await llm_generate(session, prompt, json.dumps(api_json))
-             session.reset_flow()
-             return human_response if human_response else f"İşlem sonucu: {api_json}"
-         else:
-             session.reset_flow()
-             return f"İşlem tamamlandı: {api_json}"
-
-     except requests.exceptions.Timeout:
-         log_warning(f"⏱️ API timeout: {api_name}")
-         session.reset_flow()
-         return get_user_friendly_error("api_timeout")
-     except Exception as e:
-         log_error("❌ API call error", e)
-         session.reset_flow()
-         return get_user_friendly_error("api_error", {"api_name": api_name})
-
- # ───────────────────────── REQUEST MODELS ───────────────────────── #
- class ChatRequest(BaseModel):
-     message: str
-
- class StartRequest(BaseModel):
-     project_name: str
-     version_no: Optional[int] = None  # Optional; if omitted, use the highest published version number
-     is_realtime: bool = False
-     locale: Optional[str] = None
-
- class ChatResponse(BaseModel):
-     session_id: str
-     answer: str
-
- # ───────────────────────── API ENDPOINTS ───────────────────────── #
- @router.post("/start_session", response_model=ChatResponse)
- async def start_session(req: StartRequest):
-     """Create new session"""
-     global llm_provider
-
-     try:
-         # Validate project exists
-         project = next((p for p in cfg.projects if p.name == req.project_name and p.enabled), None)
-         if not project:
-             raise HTTPException(404, f"Project '{req.project_name}' not found or disabled")
-
-         # Determine locale
-         session_locale = req.locale
-         if not session_locale:
-             # Use project's default locale
-             session_locale = project.default_locale
-
-         # Validate locale is supported by project
-         if session_locale not in project.supported_locales:
-             raise HTTPException(
-                 400,
-                 f"Locale '{session_locale}' not supported by project. Supported: {project.supported_locales}"
-             )
-
-         # Find version
-         if req.version_no:
-             # Specific version requested
-             version = next((v for v in project.versions if v.no == req.version_no), None)
-             if not version:
-                 raise HTTPException(404, f"Version {req.version_no} not found for project '{req.project_name}'")
-         else:
-             # Find published version with highest version number
-             published_versions = [v for v in project.versions if v.published]
-             if not published_versions:
-                 raise HTTPException(404, f"No published version for project '{req.project_name}'")
-
-             # Sort by version number (no) and get the highest
-             version = max(published_versions, key=lambda v: v.no)
-
-         # Create LLM provider if not exists
-         if not llm_provider:
-             from llm_factory import LLMFactory
-             llm_provider = LLMFactory.create_provider()
-             log_info(f"🤖 LLM Provider created: {type(llm_provider).__name__}")
-
-         # Create session with version config (parameter fix)
-         session = session_store.create_session(
-             project_name=req.project_name,
-             version_no=version.no,
-             is_realtime=req.is_realtime,
-             locale=session_locale
-         )
-
-         # Attach the version config to the session
-         session.set_version_config(version)
-
-         # Process the welcome prompt
-         greeting = "Hoş geldiniz! Size nasıl yardımcı olabilirim?"
-         if version.welcome_prompt:
-             log_info(f"🎉 Processing welcome prompt for session {session.session_id[:8]}...")
-             try:
-                 # Send the welcome prompt to the LLM
-                 welcome_result = await llm_provider.generate(
-                     user_input="",
-                     system_prompt=version.welcome_prompt,
-                     context=[]
-                 )
-                 if welcome_result and welcome_result.strip():
-                     greeting = welcome_result.strip()
-             except Exception as e:
-                 log_error("⚠️ Welcome prompt processing failed", e)
-                 # Fallback to default greeting
-
-         session.add_turn("assistant", greeting)
-
-         log_info(f"✅ Session created for project '{req.project_name}' version {version.no} (highest published)")
-
-         return ChatResponse(session_id=session.session_id, answer=greeting)
-
-     except HTTPException:
-         raise
-     except Exception as e:
-         log_error("❌ Session creation error", e)
-         raise HTTPException(500, f"Session creation failed: {str(e)}")
-
- @router.post("/chat")
- async def chat(req: ChatRequest, x_session_id: str = Header(...)):
-     """Process chat message"""
-     try:
-         # Get session
-         session = session_store.get_session(x_session_id)
-         if not session:
-             # Better error message
-             raise HTTPException(
-                 status_code=404,
-                 detail=get_user_friendly_error("session_not_found")
-             )
-
-         # Check for session expiry
-         if session.is_expired():
-             session_store.delete_session(x_session_id)
-             raise HTTPException(
-                 status_code=401,
-                 detail=get_user_friendly_error("session_expired")
-             )
-
-         # Update last activity
-         session.last_activity = datetime.utcnow().isoformat()
-         session_store.update_session(session)
-
-         # Existing flow continues below
-         # Add user message to history
-         session.add_message("user", req.message)
-         log_info(f"💬 User [{session.session_id[:8]}...]: {req.message}")
-
-         # Get project and version config
-         project = next((p for p in cfg.projects if p.name == session.project_name), None)
-         if not project:
-             raise HTTPException(
-                 status_code=404,
-                 detail=get_user_friendly_error("project_not_found")
-             )
-
-         version = session.get_version_config()
-         if not version:
-             raise HTTPException(
-                 status_code=400,
-                 detail=get_user_friendly_error("version_not_found")
-             )
-
-         # Process based on current state
-         if session.state == "idle":
-             # Build intent detection prompt
-             prompt = build_intent_prompt(version, session.chat_history, project.default_locale)
-             raw = await llm_generate(session, prompt, req.message)
-
-             # Check for intent
-             intent_name, tail = _safe_intent_parse(raw)
-
-             if intent_name:
-                 # Find intent config
-                 intent_config = next((i for i in version.intents if i.name == intent_name), None)
-
-                 if intent_config:
-                     session.current_intent = intent_name
-                     session.intent_config = intent_config
-                     session.state = "collect_params"
-                     log_info(f"🎯 Intent detected: {intent_name}")
-
-                     # Check if parameters were already extracted
-                     if tail and _extract_parameters_from_response(tail, session, intent_config):
-                         log_info("📦 Some parameters extracted from initial response")
-
-                     # Check what parameters are missing
-                     missing_params = [
-                         p.name for p in intent_config.parameters
-                         if p.required and p.variable_name not in session.variables
-                     ]
-
-                     if not missing_params:
-                         # All required parameters collected, execute API
-                         response = await _execute_api_call(session, intent_config)
-                         session.add_message("assistant", response)
-                         return {"response": response, "intent": intent_name, "state": "completed"}
-                     else:
-                         # Need to collect more parameters
-                         # Get parameter collection config
-                         collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
-                         max_params = collection_config.get("max_params_per_question", 2)
-
-                         # Decide which parameters to ask
-                         params_to_ask = missing_params[:max_params]
-
-                         param_prompt = build_parameter_prompt(
-                             version=version,
-                             intent_config=intent_config,
-                             chat_history=session.chat_history,
-                             collected_params=session.variables,
-                             missing_params=missing_params,
-                             params_to_ask=params_to_ask,
-                             max_params=max_params,
-                             project_locale=project.default_locale,
-                             unanswered_params=session.unanswered_parameters
-                         )
-
-                         param_question = await llm_generate(session, param_prompt, req.message)
-                         clean_question = _trim_response(param_question)
-                         session.add_message("assistant", clean_question)
-                         return {"response": clean_question, "intent": intent_name, "state": "collecting_params"}
-
-                 else:
-                     log_info(f"⚠️ Unknown intent: {intent_name}")
-
-             # No intent detected, return general response
-             clean_response = _trim_response(raw)
-             session.add_message("assistant", clean_response)
-             return {"response": clean_response, "state": "idle"}
-
-         elif session.state == "collect_params":
-             # Continue parameter collection
-             intent_config = session.intent_config
-
-             # Try to extract parameters from user message
-             param_prompt = f"""
- Extract parameters from user message: "{req.message}"
-
- Expected parameters:
- {json.dumps([{
-     'name': p.name,
-     'type': p.type,
-     'required': p.required,
-     'extraction_prompt': p.extraction_prompt
- } for p in intent_config.parameters if p.variable_name not in session.variables], ensure_ascii=False)}
-
- Return as JSON object with parameter names as keys.
- """
-
-             raw = await llm_generate(session, param_prompt, req.message)
-             _extract_parameters_from_response(raw, session, intent_config)
-
-             # Check what parameters are still missing
-             missing_params = [
-                 p.name for p in intent_config.parameters
-                 if p.required and p.variable_name not in session.variables
-             ]
-
-             if not missing_params:
-                 # All parameters collected, execute API
-                 response = await _execute_api_call(session, intent_config)
-                 session.add_message("assistant", response)
-                 return {"response": response, "intent": session.current_intent, "state": "completed"}
-             else:
-                 # Still need more parameters
-                 # Get parameter collection config
-                 collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
-                 max_params = collection_config.get("max_params_per_question", 2)
-
-                 # Decide which parameters to ask
-                 params_to_ask = missing_params[:max_params]
-
-                 param_prompt = build_parameter_prompt(
-                     version=version,
-                     intent_config=intent_config,
-                     chat_history=session.chat_history,
-                     collected_params=session.variables,
-                     missing_params=missing_params,
-                     params_to_ask=params_to_ask,
-                     max_params=max_params,
-                     project_locale=project.default_locale,
-                     unanswered_params=session.unanswered_parameters
-                 )
-                 param_question = await llm_generate(session, param_prompt, req.message)
-                 clean_question = _trim_response(param_question)
-                 session.add_message("assistant", clean_question)
-                 return {"response": clean_question, "intent": session.current_intent, "state": "collecting_params"}
-
-         else:
-             # Unknown state, reset
-             session.reset_flow()
-             return {"response": get_user_friendly_error("internal_error"), "state": "error"}
-
-     except HTTPException:
-         raise
-     except requests.exceptions.Timeout:
-         # Better timeout error
-         log_error(f"Timeout in chat for session {x_session_id[:8]}")
-         return {
-             "response": get_user_friendly_error("llm_timeout"),
-             "state": "error",
-             "error": True
-         }
-     except Exception as e:
-         log_error("❌ Chat error", e)
-         import traceback
-         traceback.print_exc()
-         # Better generic error
-         return {
-             "response": get_user_friendly_error("internal_error"),
-             "state": "error",
-             "error": True
-         }
-
- async def handle_new_message(session: Session, user_input: str) -> str:
-     """Handle new message (not parameter followup) - for WebSocket"""
-     try:
-         # Get version config from session
-         version = session.get_version_config()
-         if not version:
-             log_info("❌ Version config not found")
-             return "Bir hata oluştu. Lütfen tekrar deneyin."
-
-         # Get project config
-         project = next((p for p in cfg.projects if p.name == session.project_name), None)
-         if not project:
-             return "Proje konfigürasyonu bulunamadı."
-
-         # Build intent detection prompt
-         prompt = build_intent_prompt(version, session.chat_history, project.default_locale)
-
-         # Get LLM response
-         raw = await llm_generate(session, prompt, user_input)
-
-         # Empty response fallback
-         if not raw:
-             log_info("⚠️ Empty response from LLM")
-             return "Üzgünüm, mesajınızı anlayamadım. Lütfen tekrar dener misiniz?"
-
-         # Check for intent
-         intent_name, tail = _safe_intent_parse(raw)
-
-         if intent_name:
-             # Find intent config
-             intent_config = next((i for i in version.intents if i.name == intent_name), None)
-
-             if intent_config:
-                 session.current_intent = intent_name
-                 session.intent_config = intent_config
-                 session.state = "collect_params"
-                 log_info(f"🎯 Intent detected: {intent_name}")
-
-                 # Check if parameters were already extracted
-                 if tail and _extract_parameters_from_response(tail, session, intent_config):
-                     log_info("📦 Some parameters extracted from initial response")
-
-                 # Check what parameters are missing
-                 missing_params = [
-                     p.name for p in intent_config.parameters
-                     if p.required and p.variable_name not in session.variables
-                 ]
-
-                 if not missing_params:
-                     # All required parameters collected, execute API
-                     return await _execute_api_call(session, intent_config)
-                 else:
-                     # Need to collect more parameters
-                     collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
-                     max_params = collection_config.get("max_params_per_question", 2)
-
-                     # Decide which parameters to ask
-                     params_to_ask = missing_params[:max_params]
-
-                     param_prompt = build_parameter_prompt(
-                         version=version,
-                         intent_config=intent_config,
-                         chat_history=session.chat_history,
-                         collected_params=session.variables,
-                         missing_params=missing_params,
-                         params_to_ask=params_to_ask,
-                         max_params=max_params,
-                         project_locale=project.default_locale,
-                         unanswered_params=session.unanswered_parameters
-                     )
-                     param_question = await llm_generate(session, param_prompt, user_input)
-                     return _trim_response(param_question)
-
-         # No intent detected, return general response
-         return _trim_response(raw)
-
-     except Exception as e:
-         log_error("❌ Error in handle_new_message", e)
-         return "Bir hata oluştu. Lütfen tekrar deneyin."
-
- async def handle_parameter_followup(session: Session, user_input: str) -> str:
-     """Handle parameter collection followup - for WebSocket"""
-     try:
-         if not session.intent_config:
-             log_info("⚠️ No intent config in session")
-             session.reset_flow()
-             return "Üzgünüm, hangi işlem için bilgi istediğimi unuttum. Baştan başlayalım."
-
-         intent_config = session.intent_config
-
-         # Get project config and version
-         project = next((p for p in cfg.projects if p.name == session.project_name), None)
-         if not project:
-             return "Proje konfigürasyonu bulunamadı."
-
-         version = session.get_version_config()
-         if not version:
-             return "Versiyon konfigürasyonu bulunamadı."
-
-         # Try to extract parameters from user message
-         param_prompt = f"""
- Extract parameters from user message: "{user_input}"
-
- Expected parameters:
- {json.dumps([{
-     'name': p.name,
-     'type': p.type,
-     'required': p.required,
-     'extraction_prompt': p.extraction_prompt
- } for p in intent_config.parameters if p.variable_name not in session.variables], ensure_ascii=False)}
-
- Return as JSON object with parameter names as keys.
- """
-
-         raw = await llm_generate(session, param_prompt, user_input)
-         _extract_parameters_from_response(raw, session, intent_config)
-
-         # Check what parameters are still missing
-         missing_params = [
-             p.name for p in intent_config.parameters  # must be p.name, not just p
-             if p.required and p.variable_name not in session.variables
-         ]
-
-         if not missing_params:
-             # All parameters collected, execute API
-             return await _execute_api_call(session, intent_config)
-         else:
-             # Still need more parameters
-             collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
-             max_params = collection_config.get("max_params_per_question", 2)
-
-             # Decide which parameters to ask
-             params_to_ask = missing_params[:max_params]
-
-             param_prompt = build_parameter_prompt(
-                 version=version,
-                 intent_config=intent_config,
-                 chat_history=session.chat_history,
-                 collected_params=session.variables,
-                 missing_params=missing_params,
-                 params_to_ask=params_to_ask,
-                 max_params=max_params,
-                 project_locale=project.default_locale,
-                 unanswered_params=session.unanswered_parameters
-             )
-             param_question = await llm_generate(session, param_prompt, user_input)
-             return _trim_response(param_question)
-
-     except Exception as e:
-         log_error("❌ Error in handle_parameter_followup", e)
-         session.reset_flow()
-         return "Bir hata oluştu. Lütfen tekrar deneyin."
-
- def get_user_friendly_error(error_type: str, context: dict = None) -> str:
-     """Get user-friendly error messages"""
-     error_messages = {
-         "session_not_found": "Oturumunuz bulunamadı. Lütfen yeni bir konuşma başlatın.",
-         "project_not_found": "Proje konfigürasyonu bulunamadı. Lütfen yönetici ile iletişime geçin.",
-         "version_not_found": "Proje versiyonu bulunamadı. Lütfen geçerli bir versiyon seçin.",
-         "intent_not_found": "Üzgünüm, ne yapmak istediğinizi anlayamadım. Lütfen daha açık bir şekilde belirtir misiniz?",
-         "api_timeout": "İşlem zaman aşımına uğradı. Lütfen tekrar deneyin.",
-         "api_error": "İşlem sırasında bir hata oluştu. Lütfen daha sonra tekrar deneyin.",
-         "parameter_validation": "Girdiğiniz bilgide bir hata var. Lütfen kontrol edip tekrar deneyin.",
-         "llm_error": "Sistem yanıt veremedi. Lütfen biraz sonra tekrar deneyin.",
-         "llm_timeout": "Sistem meşgul. Lütfen birkaç saniye bekleyip tekrar deneyin.",
-         "session_expired": "Oturumunuz zaman aşımına uğradı. Lütfen yeni bir konuşma başlatın.",
-         "rate_limit": "Çok fazla istek gönderdiniz. Lütfen biraz bekleyin.",
-         "internal_error": "Beklenmeyen bir hata oluştu. Lütfen yönetici ile iletişime geçin."
-     }
-
-     message = error_messages.get(error_type, error_messages["internal_error"])
-
-     # Add context details if provided
-     if context:
-         if error_type == "parameter_validation" and "field" in context:
-             message = f"{context['field']} alanı için {message}"
-         elif error_type == "api_error" and "api_name" in context:
-             message = f"{context['api_name']} servisi için {message}"
-
-     return message
-
- def validate_parameter_with_message(param_config, value, locale="tr") -> tuple[bool, str]:
-     """Validate parameter and return user-friendly message"""
-     try:
-         # Type validation
-         if param_config.type == "int":
-             try:
-                 int(value)
-             except ValueError:
-                 return False, f"Lütfen geçerli bir sayı girin."
-
-         elif param_config.type == "float":
-             try:
-                 float(value)
-             except ValueError:
-                 return False, f"Lütfen geçerli bir ondalık sayı girin."
-
-         elif param_config.type == "date":
-             # Date parsing with locale support
-             from locale_manager import LocaleManager
-             parsed_date = LocaleManager.parse_date_expression(value, locale)
-             if not parsed_date:
-                 return False, f"Lütfen geçerli bir tarih girin (örn: yarın, 15 Haziran, 2025-06-15)."
-
-         elif param_config.type == "bool":
-             if value.lower() not in ["evet", "hayır", "yes", "no", "true", "false"]:
-                 return False, f"Lütfen 'evet' veya 'hayır' olarak cevaplayın."
-
-         # Regex validation
-         if param_config.validation_regex:
-             import re
-             if not re.match(param_config.validation_regex, value):
-                 return False, param_config.invalid_prompt or "Girdiğiniz değer geçerli formatta değil."
-
-         return True, ""
-
-     except Exception as e:
-         log_error(f"Parameter validation error", e)
-         return False, "Değer kontrol edilirken bir hata oluştu."
-
- # Initialize LLM on module load
- setup_llm_provider()
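
For reference, the HTTP surface removed by this commit can be exercised with a minimal client sketch. The endpoint paths, request fields, and the x-session-id header come from the deleted handler above; the base URL, port, and project name are assumptions for illustration only.

import httpx

BASE_URL = "http://localhost:7860"  # assumed host/port, not defined in the deleted file

def demo() -> None:
    with httpx.Client(base_url=BASE_URL, timeout=30.0) as client:
        # POST /start_session with a StartRequest body; returns {"session_id", "answer"}
        start = client.post("/start_session", json={
            "project_name": "demo-project",  # hypothetical project name
            "is_realtime": False,
            "locale": "tr",
        })
        start.raise_for_status()
        session_id = start.json()["session_id"]
        print("greeting:", start.json()["answer"])

        # POST /chat carries the session id in the x-session-id header
        reply = client.post(
            "/chat",
            json={"message": "Merhaba"},
            headers={"x-session-id": session_id},
        )
        reply.raise_for_status()
        print("reply:", reply.json().get("response"))

if __name__ == "__main__":
    demo()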