# NOTE: stray page-scrape artifacts ("Spaces:" / "Building") commented out —
# they were not valid Python and broke the module at import time.
# Spaces:
# Building
# Building
"""
Flare – Chat Handler (Refactored with LLM Factory)
==================================================
"""
import re, json, sys, httpx, os | |
from datetime import datetime | |
from typing import Dict, List, Optional, Any | |
from fastapi import APIRouter, HTTPException, Header | |
from pydantic import BaseModel | |
import requests | |
from prompt_builder import build_intent_prompt, build_parameter_prompt | |
from logger import log_info, log_error, log_warning, log_debug | |
from api_executor import call_api as execute_api | |
from config_provider import ConfigProvider | |
from validation_engine import validate | |
from session import session_store, Session | |
# Initialize router (route registration appears to happen elsewhere —
# the handlers below carry no decorators; TODO confirm).
router = APIRouter()
# ───────────────────────── GLOBAL VARS ───────────────────────── #
cfg = ConfigProvider.get()  # global configuration snapshot (projects, APIs, LLM settings)
llm_provider = None  # lazily created by setup_llm_provider() / start_session()
# ───────────────────────── HELPERS ───────────────────────── # | |
def _trim_response(raw: str) -> str: | |
""" | |
Remove everything after the first logical assistant block or intent tag. | |
Also strips trailing 'assistant' artifacts and prompt injections. | |
""" | |
# Stop at our own rules if model leaked them | |
for stop in ["#DETECTED_INTENT", "⚠️", "\nassistant", "assistant\n", "assistant"]: | |
idx = raw.find(stop) | |
if idx != -1: | |
raw = raw[:idx] | |
# Normalise selamlama | |
raw = re.sub(r"Hoş[\s-]?geldin(iz)?", "Hoş geldiniz", raw, flags=re.IGNORECASE) | |
return raw.strip() | |
def _safe_intent_parse(raw: str) -> tuple[str, str]: | |
"""Extract intent name and extra tail.""" | |
m = re.search(r"#DETECTED_INTENT:\s*([A-Za-z0-9_-]+)", raw) | |
if not m: | |
return "", raw | |
name = m.group(1) | |
# Remove 'assistant' suffix if exists | |
if name.endswith("assistant"): | |
name = name[:-9] # Remove last 9 chars ("assistant") | |
log_info(f"🔧 Removed 'assistant' suffix from intent name") | |
tail = raw[m.end():] | |
log_info(f"🎯 Parsed intent: {name}") | |
return name, tail | |
# ───────────────────────── LLM SETUP ───────────────────────── # | |
def setup_llm_provider():
    """Create the module-level LLM provider via the factory.

    Raises whatever the factory raises after logging the failure; on
    success the provider is stored in the module global `llm_provider`.
    """
    global llm_provider
    try:
        from llm_factory import LLMFactory  # local import: avoids a module-load cycle
        llm_provider = LLMFactory.create_provider()
        log_info("✅ LLM provider initialized successfully")
    except Exception as exc:
        log_error("❌ Failed to initialize LLM provider", exc)
        raise
# ───────────────────────── LLM GENERATION ───────────────────────── # | |
async def llm_generate(s: Session, prompt: str, user_msg: str) -> str:
    """Send a system prompt plus the user message to the LLM provider.

    Validates that a version config exists (session first, then the
    project's published version as a fallback — validation only; the
    value is not otherwise used), forwards the last ten history turns
    as context, and returns the raw model reply.

    Raises:
        HTTPException: 504 on provider timeout, 500 on any other failure.
    """
    global llm_provider
    if llm_provider is None:
        setup_llm_provider()
    try:
        ver = s.get_version_config()
        if not ver:
            # Fallback: resolve the published version from global config.
            proj = next((p for p in cfg.projects if p.name == s.project_name), None)
            if not proj:
                raise ValueError(f"Project not found: {s.project_name}")
            ver = next((v for v in proj.versions if v.published), None)
            if not ver:
                raise ValueError("No published version found")
        log_info(f"🚀 Calling LLM for session {s.session_id[:8]}...")
        log_info(f"📋 Prompt preview (first 200 chars): {prompt[:200]}...")
        recent = s.chat_history
        reply = await llm_provider.generate(
            user_input=user_msg,
            system_prompt=prompt,
            context=recent[-10:] if recent else []
        )
        log_info(f"🪄 LLM raw response: {reply[:100]}...")
        return reply
    except requests.exceptions.Timeout:
        # NOTE(review): the provider call is async — confirm it actually
        # raises requests' Timeout and not httpx's.
        log_warning(f"⏱️ LLM timeout for session {s.session_id[:8]}")
        raise HTTPException(status_code=504, detail="LLM request timed out")
    except Exception as exc:
        log_error("❌ LLM error", exc)
        raise HTTPException(status_code=500, detail=f"LLM error: {str(exc)}")
# ───────────────────────── PARAMETER EXTRACTION ───────────────────────── # | |
def _extract_parameters_from_response(raw: str, session: Session, intent_config) -> bool:
    """Extract a JSON parameter payload from an LLM reply into the session.

    Looks for a ```json fenced block first, then falls back to the first
    bare {...} object. Each recognized parameter is validated and, when
    valid, stored in session.variables under its configured variable name.

    Returns:
        True if at least one parameter validated and was stored.
    """
    try:
        # Track which pattern matched so the correct group is read. The old
        # code chose group(1) whenever "```" appeared anywhere in raw, which
        # raised IndexError when a fence existed but the fenced regex did
        # not match (the bare-object fallback has no group 1).
        json_str = None
        fenced = re.search(r'```json\s*(.*?)\s*```', raw, re.DOTALL)
        if fenced:
            json_str = fenced.group(1)
        else:
            bare = re.search(r'\{[^}]+\}', raw)
            if bare:
                json_str = bare.group(0)
        if json_str is None:
            log_info("❌ No JSON found in response")
            return False
        params = json.loads(json_str)
        any_valid = False
        for param_name, param_value in params.items():
            # Find the matching parameter config for this intent.
            param_config = next(
                (p for p in intent_config.parameters if p.name == param_name),
                None
            )
            if not param_config:
                log_info(f"⚠️ Parameter config not found for: {param_name}")
                continue
            # Validate before storing; values are stored as strings.
            if validate(str(param_value), param_config):
                session.variables[param_config.variable_name] = str(param_value)
                any_valid = True
                log_info(f"✅ Extracted {param_name}={param_value} → {param_config.variable_name}")
            else:
                log_info(f"❌ Invalid {param_name}={param_value}")
        return any_valid
    except json.JSONDecodeError as e:
        log_error("❌ JSON parsing error", e)
        log_error(f"❌ Failed to parse: {raw[:200]}")
        return False
    except Exception as e:
        log_error("❌ Parameter processing error", e)
        return False
# ───────────────────────── API EXECUTION ───────────────────────── # | |
async def _execute_api_call(session: Session, intent_config) -> str:
    """Execute the intent's backing API and return a humanized response.

    Always resets the session flow before returning; failures are returned
    as user-friendly Turkish strings rather than raised.
    """
    # Resolve the API name up front so the except handlers below can always
    # reference it — previously it was bound inside the try, so a failure
    # before that binding turned into a NameError inside the handler.
    api_name = getattr(intent_config, "action", None)
    try:
        session.state = "call_api"
        api_config = cfg.get_api(api_name)
        if not api_config:
            log_info(f"❌ API config not found: {api_name}")
            session.reset_flow()
            return get_user_friendly_error("api_error", {"api_name": api_name})
        log_info(f"📡 Calling API: {api_name}")
        log_info(f"📦 API variables: {session.variables}")
        # Execute API call with session (api_executor resolves variables)
        response = execute_api(api_config, session)
        api_json = response.json()
        log_info(f"✅ API response: {api_json}")
        # Humanize response through the API's response prompt, if configured
        session.state = "humanize"
        if api_config.response_prompt:
            prompt = api_config.response_prompt.replace(
                "{{api_response}}",
                json.dumps(api_json, ensure_ascii=False)
            )
            human_response = await llm_generate(session, prompt, json.dumps(api_json))
            session.reset_flow()
            return human_response if human_response else f"İşlem sonucu: {api_json}"
        else:
            session.reset_flow()
            return f"İşlem tamamlandı: {api_json}"
    except requests.exceptions.Timeout:
        # NOTE(review): confirm api_executor is requests-based; an httpx
        # timeout would fall through to the generic handler below.
        log_warning(f"⏱️ API timeout: {api_name}")
        session.reset_flow()
        return get_user_friendly_error("api_timeout")
    except Exception as e:
        log_error("❌ API call error", e)
        session.reset_flow()
        return get_user_friendly_error("api_error", {"api_name": api_name})
# ───────────────────────── REQUEST MODELS ───────────────────────── # | |
class ChatRequest(BaseModel):
    """Request body for the chat endpoint: one free-text user message."""
    message: str
class StartRequest(BaseModel):
    """Request body for session creation."""
    project_name: str
    version_no: Optional[int] = None  # optional; when omitted, the highest published version number is used
    is_realtime: bool = False
    locale: Optional[str] = None  # falls back to the project's default locale
class ChatResponse(BaseModel):
    """Response for session creation: the new session id plus the greeting."""
    session_id: str
    answer: str
# ───────────────────────── API ENDPOINTS ───────────────────────── # | |
async def start_session(req: StartRequest):
    """Create a new chat session and return the opening greeting.

    Flow: validate the project, resolve the locale (request value or the
    project default), resolve the requested version or the highest
    published one, lazily create the LLM provider, create the session,
    then optionally run the version's welcome prompt through the LLM to
    produce the greeting.

    Raises:
        HTTPException: 404 for unknown project/version, 400 for an
            unsupported locale, 500 for any unexpected failure.
    """
    global llm_provider
    try:
        # Validate project exists and is enabled
        project = next((p for p in cfg.projects if p.name == req.project_name and p.enabled), None)
        if not project:
            raise HTTPException(404, f"Project '{req.project_name}' not found or disabled")
        # Determine locale
        session_locale = req.locale
        if not session_locale:
            # Use project's default locale
            session_locale = project.default_locale
        # Validate locale is supported by project
        if session_locale not in project.supported_locales:
            raise HTTPException(
                400,
                f"Locale '{session_locale}' not supported by project. Supported: {project.supported_locales}"
            )
        # Find version
        if req.version_no:
            # Specific version requested
            version = next((v for v in project.versions if v.no == req.version_no), None)
            if not version:
                raise HTTPException(404, f"Version {req.version_no} not found for project '{req.project_name}'")
        else:
            # Find published version with highest version number
            published_versions = [v for v in project.versions if v.published]
            if not published_versions:
                raise HTTPException(404, f"No published version for project '{req.project_name}'")
            # Sort by version number (no) and get the highest
            version = max(published_versions, key=lambda v: v.no)
        # Create LLM provider if not exists (lazy module-level singleton)
        if not llm_provider:
            from llm_factory import LLMFactory
            llm_provider = LLMFactory.create_provider()
            log_info(f"🤖 LLM Provider created: {type(llm_provider).__name__}")
        # Create the session bound to the resolved version
        session = session_store.create_session(
            project_name=req.project_name,
            version_no=version.no,
            is_realtime=req.is_realtime,
            locale=session_locale
        )
        # Attach the version config so later turns can read it from the session
        session.set_version_config(version)
        # Build the greeting, preferring the version's welcome prompt
        greeting = "Hoş geldiniz! Size nasıl yardımcı olabilirim?"
        if version.welcome_prompt:
            log_info(f"🎉 Processing welcome prompt for session {session.session_id[:8]}...")
            try:
                # Run the welcome prompt through the LLM with empty user input
                welcome_result = await llm_provider.generate(
                    user_input="",
                    system_prompt=version.welcome_prompt,
                    context=[]
                )
                if welcome_result and welcome_result.strip():
                    greeting = welcome_result.strip()
            except Exception as e:
                log_error("⚠️ Welcome prompt processing failed", e)
                # Fallback to default greeting
        # NOTE(review): other handlers record turns via session.add_message();
        # confirm add_turn() here is intentional/equivalent.
        session.add_turn("assistant", greeting)
        log_info(f"✅ Session created for project '{req.project_name}' version {version.no} (highest published)")
        return ChatResponse(session_id=session.session_id, answer=greeting)
    except HTTPException:
        raise
    except Exception as e:
        log_error("❌ Session creation error", e)
        raise HTTPException(500, f"Session creation failed: {str(e)}")
async def chat(req: ChatRequest, x_session_id: str = Header(...)):
    """Process one chat message for the session named in the X-Session-ID header.

    State machine: "idle" runs intent detection; "collect_params" extracts
    and collects missing intent parameters; any other state resets the flow.
    Returns a dict with "response", optional "intent", and "state".

    Raises:
        HTTPException: 404 (session/project missing), 401 (session expired),
            400 (version config missing).
    """
    try:
        # Get session
        session = session_store.get_session(x_session_id)
        if not session:
            raise HTTPException(
                status_code=404,
                detail=get_user_friendly_error("session_not_found")
            )
        # Expired sessions are deleted and rejected
        if session.is_expired():
            session_store.delete_session(x_session_id)
            raise HTTPException(
                status_code=401,
                detail=get_user_friendly_error("session_expired")
            )
        # Update last activity
        # NOTE(review): utcnow() is naive — confirm the store expects naive ISO strings.
        session.last_activity = datetime.utcnow().isoformat()
        session_store.update_session(session)
        # Add user message to history
        session.add_message("user", req.message)
        log_info(f"💬 User [{session.session_id[:8]}...]: {req.message}")
        # Get project and version config
        project = next((p for p in cfg.projects if p.name == session.project_name), None)
        if not project:
            raise HTTPException(
                status_code=404,
                detail=get_user_friendly_error("project_not_found")
            )
        version = session.get_version_config()
        if not version:
            raise HTTPException(
                status_code=400,
                detail=get_user_friendly_error("version_not_found")
            )
        # Process based on current state
        if session.state == "idle":
            # Build intent detection prompt
            prompt = build_intent_prompt(version, session.chat_history, project.default_locale)
            raw = await llm_generate(session, prompt, req.message)
            # Check for intent
            intent_name, tail = _safe_intent_parse(raw)
            if intent_name:
                # Find intent config
                intent_config = next((i for i in version.intents if i.name == intent_name), None)
                if intent_config:
                    session.current_intent = intent_name
                    session.intent_config = intent_config
                    session.state = "collect_params"
                    log_info(f"🎯 Intent detected: {intent_name}")
                    # Check if parameters were already extracted from the tail
                    if tail and _extract_parameters_from_response(tail, session, intent_config):
                        log_info("📦 Some parameters extracted from initial response")
                    # Check what required parameters are still missing
                    missing_params = [
                        p.name for p in intent_config.parameters
                        if p.required and p.variable_name not in session.variables
                    ]
                    if not missing_params:
                        # All required parameters collected, execute API
                        response = await _execute_api_call(session, intent_config)
                        session.add_message("assistant", response)
                        return {"response": response, "intent": intent_name, "state": "completed"}
                    else:
                        # Need to collect more parameters; batch size comes
                        # from the global parameter-collection config.
                        collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
                        max_params = collection_config.get("max_params_per_question", 2)
                        # Decide which parameters to ask
                        params_to_ask = missing_params[:max_params]
                        param_prompt = build_parameter_prompt(
                            version=version,
                            intent_config=intent_config,
                            chat_history=session.chat_history,
                            collected_params=session.variables,
                            missing_params=missing_params,
                            params_to_ask=params_to_ask,
                            max_params=max_params,
                            project_locale=project.default_locale,
                            unanswered_params=session.unanswered_parameters
                        )
                        param_question = await llm_generate(session, param_prompt, req.message)
                        clean_question = _trim_response(param_question)
                        session.add_message("assistant", clean_question)
                        return {"response": clean_question, "intent": intent_name, "state": "collecting_params"}
                else:
                    log_info(f"⚠️ Unknown intent: {intent_name}")
            # No intent detected, return general response
            clean_response = _trim_response(raw)
            session.add_message("assistant", clean_response)
            return {"response": clean_response, "state": "idle"}
        elif session.state == "collect_params":
            # Continue parameter collection
            intent_config = session.intent_config
            # Try to extract parameters from the user message via the LLM
            param_prompt = f"""
Extract parameters from user message: "{req.message}"
Expected parameters:
{json.dumps([{
    'name': p.name,
    'type': p.type,
    'required': p.required,
    'extraction_prompt': p.extraction_prompt
} for p in intent_config.parameters if p.variable_name not in session.variables], ensure_ascii=False)}
Return as JSON object with parameter names as keys.
"""
            raw = await llm_generate(session, param_prompt, req.message)
            _extract_parameters_from_response(raw, session, intent_config)
            # Check what parameters are still missing
            missing_params = [
                p.name for p in intent_config.parameters
                if p.required and p.variable_name not in session.variables
            ]
            if not missing_params:
                # All parameters collected, execute API
                response = await _execute_api_call(session, intent_config)
                session.add_message("assistant", response)
                return {"response": response, "intent": session.current_intent, "state": "completed"}
            else:
                # Still need more parameters; ask for the next batch
                collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
                max_params = collection_config.get("max_params_per_question", 2)
                # Decide which parameters to ask
                params_to_ask = missing_params[:max_params]
                param_prompt = build_parameter_prompt(
                    version=version,
                    intent_config=intent_config,
                    chat_history=session.chat_history,
                    collected_params=session.variables,
                    missing_params=missing_params,
                    params_to_ask=params_to_ask,
                    max_params=max_params,
                    project_locale=project.default_locale,
                    unanswered_params=session.unanswered_parameters
                )
                param_question = await llm_generate(session, param_prompt, req.message)
                clean_question = _trim_response(param_question)
                session.add_message("assistant", clean_question)
                return {"response": clean_question, "intent": session.current_intent, "state": "collecting_params"}
        else:
            # Unknown state, reset
            session.reset_flow()
            return {"response": get_user_friendly_error("internal_error"), "state": "error"}
    except HTTPException:
        raise
    except requests.exceptions.Timeout:
        # Surface timeouts as a friendly message instead of a 500
        log_error(f"Timeout in chat for session {x_session_id[:8]}")
        return {
            "response": get_user_friendly_error("llm_timeout"),
            "state": "error",
            "error": True
        }
    except Exception as e:
        log_error("❌ Chat error", e)
        import traceback
        traceback.print_exc()
        # Generic fallback: keep the session alive and report a friendly error
        return {
            "response": get_user_friendly_error("internal_error"),
            "state": "error",
            "error": True
        }
async def handle_new_message(session: Session, user_input: str) -> str:
    """Handle a new (non-followup) user message — WebSocket path.

    Mirrors the "idle" branch of chat(): runs intent detection, and either
    kicks off parameter collection or returns the model's general reply.
    Returns a plain response string; failures return Turkish fallback
    messages instead of raising.
    """
    try:
        # Get version config from session
        version = session.get_version_config()
        if not version:
            log_info("❌ Version config not found")
            return "Bir hata oluştu. Lütfen tekrar deneyin."
        # Get project config
        project = next((p for p in cfg.projects if p.name == session.project_name), None)
        if not project:
            return "Proje konfigürasyonu bulunamadı."
        # Build intent detection prompt
        prompt = build_intent_prompt(version, session.chat_history, project.default_locale)
        # Get LLM response
        raw = await llm_generate(session, prompt, user_input)
        # Empty response fallback
        if not raw:
            log_info("⚠️ Empty response from LLM")
            return "Üzgünüm, mesajınızı anlayamadım. Lütfen tekrar dener misiniz?"
        # Check for intent
        intent_name, tail = _safe_intent_parse(raw)
        if intent_name:
            # Find intent config
            intent_config = next((i for i in version.intents if i.name == intent_name), None)
            if intent_config:
                session.current_intent = intent_name
                session.intent_config = intent_config
                session.state = "collect_params"
                log_info(f"🎯 Intent detected: {intent_name}")
                # Check if parameters were already extracted from the tail
                if tail and _extract_parameters_from_response(tail, session, intent_config):
                    log_info("📦 Some parameters extracted from initial response")
                # Check what required parameters are still missing
                missing_params = [
                    p.name for p in intent_config.parameters
                    if p.required and p.variable_name not in session.variables
                ]
                if not missing_params:
                    # All required parameters collected, execute API
                    return await _execute_api_call(session, intent_config)
                else:
                    # Need to collect more parameters (batch size from global config)
                    collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
                    max_params = collection_config.get("max_params_per_question", 2)
                    # Decide which parameters to ask
                    params_to_ask = missing_params[:max_params]
                    param_prompt = build_parameter_prompt(
                        version=version,
                        intent_config=intent_config,
                        chat_history=session.chat_history,
                        collected_params=session.variables,
                        missing_params=missing_params,
                        params_to_ask=params_to_ask,
                        max_params=max_params,
                        project_locale=project.default_locale,
                        unanswered_params=session.unanswered_parameters
                    )
                    param_question = await llm_generate(session, param_prompt, user_input)
                    return _trim_response(param_question)
        # No intent detected (or unknown intent name), return general response
        return _trim_response(raw)
    except Exception as e:
        log_error("❌ Error in handle_new_message", e)
        return "Bir hata oluştu. Lütfen tekrar deneyin."
async def handle_parameter_followup(session: Session, user_input: str) -> str:
    """Handle a parameter-collection followup message — WebSocket path.

    Extracts any parameters present in the user's reply, then either
    executes the API (all required params collected) or asks for the next
    batch. Returns a plain response string; errors reset the flow and
    return a Turkish fallback message.
    """
    try:
        if not session.intent_config:
            log_info("⚠️ No intent config in session")
            session.reset_flow()
            return "Üzgünüm, hangi işlem için bilgi istediğimi unuttum. Baştan başlayalım."
        intent_config = session.intent_config
        # Get project config and version
        project = next((p for p in cfg.projects if p.name == session.project_name), None)
        if not project:
            return "Proje konfigürasyonu bulunamadı."
        version = session.get_version_config()
        if not version:
            return "Versiyon konfigürasyonu bulunamadı."
        # Try to extract parameters from the user message via the LLM
        param_prompt = f"""
Extract parameters from user message: "{user_input}"
Expected parameters:
{json.dumps([{
    'name': p.name,
    'type': p.type,
    'required': p.required,
    'extraction_prompt': p.extraction_prompt
} for p in intent_config.parameters if p.variable_name not in session.variables], ensure_ascii=False)}
Return as JSON object with parameter names as keys.
"""
        raw = await llm_generate(session, param_prompt, user_input)
        _extract_parameters_from_response(raw, session, intent_config)
        # Check what required parameter names are still missing
        missing_params = [
            p.name for p in intent_config.parameters
            if p.required and p.variable_name not in session.variables
        ]
        if not missing_params:
            # All parameters collected, execute API
            return await _execute_api_call(session, intent_config)
        else:
            # Still need more parameters; ask for the next batch
            collection_config = cfg.global_config.llm_provider.settings.get("parameter_collection_config", {})
            max_params = collection_config.get("max_params_per_question", 2)
            # Decide which parameters to ask
            params_to_ask = missing_params[:max_params]
            param_prompt = build_parameter_prompt(
                version=version,
                intent_config=intent_config,
                chat_history=session.chat_history,
                collected_params=session.variables,
                missing_params=missing_params,
                params_to_ask=params_to_ask,
                max_params=max_params,
                project_locale=project.default_locale,
                unanswered_params=session.unanswered_parameters
            )
            param_question = await llm_generate(session, param_prompt, user_input)
            return _trim_response(param_question)
    except Exception as e:
        log_error("❌ Error in handle_parameter_followup", e)
        session.reset_flow()
        return "Bir hata oluştu. Lütfen tekrar deneyin."
def get_user_friendly_error(error_type: str, context: Optional[dict] = None) -> str:
    """Map an internal error code to a localized (Turkish) user-facing message.

    Args:
        error_type: One of the known error keys; unknown keys fall back to
            the generic "internal_error" message.
        context: Optional extra detail — {"field": ...} for
            parameter_validation or {"api_name": ...} for api_error — that
            is prefixed onto the message. (Annotation fixed: the default is
            None, so the type is Optional[dict], not dict.)

    Returns:
        The user-facing message string.
    """
    error_messages = {
        "session_not_found": "Oturumunuz bulunamadı. Lütfen yeni bir konuşma başlatın.",
        "project_not_found": "Proje konfigürasyonu bulunamadı. Lütfen yönetici ile iletişime geçin.",
        "version_not_found": "Proje versiyonu bulunamadı. Lütfen geçerli bir versiyon seçin.",
        "intent_not_found": "Üzgünüm, ne yapmak istediğinizi anlayamadım. Lütfen daha açık bir şekilde belirtir misiniz?",
        "api_timeout": "İşlem zaman aşımına uğradı. Lütfen tekrar deneyin.",
        "api_error": "İşlem sırasında bir hata oluştu. Lütfen daha sonra tekrar deneyin.",
        "parameter_validation": "Girdiğiniz bilgide bir hata var. Lütfen kontrol edip tekrar deneyin.",
        "llm_error": "Sistem yanıt veremedi. Lütfen biraz sonra tekrar deneyin.",
        "llm_timeout": "Sistem meşgul. Lütfen birkaç saniye bekleyip tekrar deneyin.",
        "session_expired": "Oturumunuz zaman aşımına uğradı. Lütfen yeni bir konuşma başlatın.",
        "rate_limit": "Çok fazla istek gönderdiniz. Lütfen biraz bekleyin.",
        "internal_error": "Beklenmeyen bir hata oluştu. Lütfen yönetici ile iletişime geçin."
    }
    message = error_messages.get(error_type, error_messages["internal_error"])
    # Prefix context details when available
    if context:
        if error_type == "parameter_validation" and "field" in context:
            message = f"{context['field']} alanı için {message}"
        elif error_type == "api_error" and "api_name" in context:
            message = f"{context['api_name']} servisi için {message}"
    return message
def validate_parameter_with_message(param_config, value, locale="tr") -> tuple[bool, str]:
    """Validate *value* against the parameter config.

    Args:
        param_config: Duck-typed config with .type, .validation_regex, and
            .invalid_prompt attributes.
        value: The raw user-supplied string.
        locale: Locale code used for date parsing (default "tr").

    Returns:
        (True, "") when valid, else (False, user_guidance_message).
    """
    try:
        # Type validation
        if param_config.type == "int":
            try:
                int(value)
            except ValueError:
                return False, "Lütfen geçerli bir sayı girin."
        elif param_config.type == "float":
            try:
                float(value)
            except ValueError:
                return False, "Lütfen geçerli bir ondalık sayı girin."
        elif param_config.type == "date":
            # Lazy import: locale-aware date parsing lives in a project module.
            from locale_manager import LocaleManager
            parsed_date = LocaleManager.parse_date_expression(value, locale)
            if not parsed_date:
                return False, "Lütfen geçerli bir tarih girin (örn: yarın, 15 Haziran, 2025-06-15)."
        elif param_config.type == "bool":
            if value.lower() not in ["evet", "hayır", "yes", "no", "true", "false"]:
                return False, "Lütfen 'evet' veya 'hayır' olarak cevaplayın."
        # Regex validation (uses the module-level `re`; the old function-local
        # `import re` was redundant and has been removed)
        if param_config.validation_regex:
            if not re.match(param_config.validation_regex, value):
                return False, param_config.invalid_prompt or "Girdiğiniz değer geçerli formatta değil."
        return True, ""
    except Exception as e:
        log_error("Parameter validation error", e)
        return False, "Değer kontrol edilirken bir hata oluştu."
# Initialize LLM on module load.
# NOTE(review): this runs at import time and propagates factory errors,
# making the module un-importable when provider config is bad — confirm
# that fail-fast behavior is intended.
setup_llm_provider()