""" | |
Flare β Chat Handler (Refactored with LLM Factory) | |
========================================== | |
""" | |
import re, json, sys, httpx, os | |
from datetime import datetime | |
from typing import Dict, List, Optional, Any | |
from fastapi import APIRouter, HTTPException, Header | |
from pydantic import BaseModel | |
import requests | |
from prompt_builder import build_intent_prompt, build_parameter_prompt | |
from utils import log | |
from api_executor import call_api as execute_api | |
from config_provider import ConfigProvider | |
from validation_engine import validate | |
from session import session_store, Session | |
# Initialize router | |
router = APIRouter() | |
# βββββββββββββββββββββββββ GLOBAL VARS βββββββββββββββββββββββββ # | |
cfg = ConfigProvider.get() | |
llm_provider = None | |
# βββββββββββββββββββββββββ HELPERS βββββββββββββββββββββββββ # | |
def _trim_response(raw: str) -> str:
    """
    Remove everything after the first logical assistant block or intent tag.
    Also strips trailing 'assistant' artifacts and prompt injections.
    """
    # Stop at our own rules if the model leaked them
    for stop in ["#DETECTED_INTENT", "⚠️", "\nassistant", "assistant\n", "assistant"]:
        idx = raw.find(stop)
        if idx != -1:
            raw = raw[:idx]
    # Normalise the greeting
    raw = re.sub(r"Hoş[\s-]?geldin(iz)?", "Hoş geldiniz", raw, flags=re.IGNORECASE)
    return raw.strip()
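
# Illustrative example (hypothetical model output): for
#   raw = "Hoş geldin! Size yardımcı olabilirim.\nassistant#DETECTED_INTENT: ..."
# _trim_response(raw) cuts at the first stop marker and normalises the greeting,
# returning "Hoş geldiniz! Size yardımcı olabilirim."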

def _safe_intent_parse(raw: str) -> tuple[str, str]:
    """Extract intent name and extra tail."""
    m = re.search(r"#DETECTED_INTENT:\s*([A-Za-z0-9_-]+)", raw)
    if not m:
        return "", raw
    name = m.group(1)
    # Remove the 'assistant' suffix if it exists
    if name.endswith("assistant"):
        name = name[:-9]  # Strip the trailing "assistant" (9 chars)
        log("🔧 Removed 'assistant' suffix from intent name")
    tail = raw[m.end():]
    log(f"🎯 Parsed intent: {name}")
    return name, tail
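
# Illustrative example (hypothetical intent name and tail):
#   _safe_intent_parse('#DETECTED_INTENT: check_balanceassistant {"account": "1234"}')
# strips the leaked 'assistant' suffix and returns
#   ('check_balance', ' {"account": "1234"}')
# The tail is later scanned for parameter JSON the model may have emitted already.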

# ───────────────────────── LLM SETUP ───────────────────────── #
def setup_llm_provider():
    """Initialize LLM provider using factory pattern"""
    global llm_provider
    try:
        from llm_factory import LLMFactory
        llm_provider = LLMFactory.create_provider()
        log("✅ LLM provider initialized successfully")
    except Exception as e:
        log(f"❌ Failed to initialize LLM provider: {e}")
        raise

# ───────────────────────── LLM GENERATION ───────────────────────── #
async def llm_generate(s: Session, prompt: str, user_msg: str) -> str:
    """Call LLM provider with proper error handling"""
    global llm_provider
    if llm_provider is None:
        setup_llm_provider()
    try:
        # Get version config from session
        version = s.get_version_config()
        if not version:
            # Fallback: get from project config
            project = next((p for p in cfg.projects if p.name == s.project_name), None)
            if not project:
                raise ValueError(f"Project not found: {s.project_name}")
            version = next((v for v in project.versions if v.published), None)
            if not version:
                raise ValueError("No published version found")

        log(f"🚀 Calling LLM for session {s.session_id[:8]}...")
        log(f"📝 Prompt preview (first 200 chars): {prompt[:200]}...")

        # Call the configured LLM provider
        raw = await llm_provider.generate(
            user_input=user_msg,
            system_prompt=prompt,
            context=s.chat_history[-10:] if s.chat_history else []
        )
        log(f"📥 LLM raw response: {raw[:100]}...")
        return raw
    except requests.exceptions.Timeout:
        log(f"⏱️ LLM timeout for session {s.session_id[:8]}")
        raise HTTPException(status_code=504, detail="LLM request timed out")
    except Exception as e:
        log(f"❌ LLM error: {e}")
        raise HTTPException(status_code=500, detail=f"LLM error: {str(e)}")
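
# Usage sketch (hypothetical call; assumes the provider created by LLMFactory
# exposes the async generate(user_input, system_prompt, context) interface used above):
#   raw = await llm_generate(session, intent_prompt, "I want to check my balance")
# The last 10 turns of session.chat_history are passed as conversational context.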

# ───────────────────────── PARAMETER EXTRACTION ───────────────────────── #
def _extract_parameters_from_response(raw: str, session: Session, intent_config) -> bool:
    """Extract parameters from the LLM response"""
    try:
        # Look for a fenced JSON block in the response
        json_match = re.search(r'```json\s*(.*?)\s*```', raw, re.DOTALL)
        if json_match:
            json_str = json_match.group(1)
        else:
            # Fall back to the first bare JSON object
            json_match = re.search(r'\{[^}]+\}', raw)
            if not json_match:
                log("❌ No JSON found in response")
                return False
            json_str = json_match.group(0)

        params = json.loads(json_str)
        any_valid = False
        for param_name, param_value in params.items():
            # Find the parameter config
            param_config = next(
                (p for p in intent_config.parameters if p.name == param_name),
                None
            )
            if not param_config:
                log(f"⚠️ Parameter config not found for: {param_name}")
                continue
            # Validate the parameter
            if validate(str(param_value), param_config):
                session.variables[param_config.variable_name] = str(param_value)
                any_valid = True
                log(f"✅ Extracted {param_name}={param_value} → {param_config.variable_name}")
            else:
                log(f"❌ Invalid {param_name}={param_value}")
        return any_valid
    except json.JSONDecodeError as e:
        log(f"❌ JSON parsing error: {e}")
        log(f"❌ Failed to parse: {raw[:200]}")
        return False
    except Exception as e:
        log(f"❌ Parameter processing error: {e}")
        return False
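
# Illustrative example (hypothetical parameter names and values): for an LLM reply such as
#   ```json
#   {"from_account": "1234", "amount": "250"}
#   ```
# the fenced JSON is parsed, each value is validated against its parameter config,
# and valid values are stored as strings under the parameter's variable_name in
# session.variables.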

# ───────────────────────── API EXECUTION ───────────────────────── #
async def _execute_api_call(session: Session, intent_config) -> str:
    """Execute API call and return humanized response"""
    api_name = intent_config.action
    try:
        session.state = "call_api"
        api_config = cfg.get_api(api_name)
        if not api_config:
            log(f"❌ API config not found: {api_name}")
            session.reset_flow()
            return intent_config.fallback_error_prompt or "İşlem başarısız oldu."

        log(f"📡 Calling API: {api_name}")
        log(f"📦 API variables: {session.variables}")

        # Execute API call with session
        response = execute_api(api_config, session)
        api_json = response.json()
        log(f"✅ API response: {api_json}")

        # Humanize response
        session.state = "humanize"
        if api_config.response_prompt:
            prompt = api_config.response_prompt.replace(
                "{{api_response}}",
                json.dumps(api_json, ensure_ascii=False)
            )
            human_response = await llm_generate(session, prompt, json.dumps(api_json))
            session.reset_flow()
            return human_response if human_response else f"İşlem sonucu: {api_json}"
        else:
            session.reset_flow()
            return f"İşlem tamamlandı: {api_json}"
    except requests.exceptions.Timeout:
        log(f"⏱️ API timeout: {api_name}")
        session.reset_flow()
        return intent_config.fallback_timeout_prompt or "İşlem zaman aşımına uğradı."
    except Exception as e:
        log(f"❌ API call error: {e}")
        session.reset_flow()
        return intent_config.fallback_error_prompt or "İşlem sırasında bir hata oluştu."
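
# Illustrative example (hypothetical response_prompt in the API config):
#   "Summarise the following result for the user: {{api_response}}"
# The {{api_response}} placeholder is replaced with the raw API JSON before the
# prompt is passed back through llm_generate to produce the humanized answer.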

# ───────────────────────── REQUEST MODELS ───────────────────────── #
class ChatRequest(BaseModel):
    message: str

class StartRequest(BaseModel):
    project_name: str
    version_no: Optional[int] = None  # Optional; if omitted, the highest published version number is used

class ChatResponse(BaseModel):
    session_id: str
    answer: str
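
# Illustrative payloads (hypothetical project and message values):
#   StartRequest  -> {"project_name": "demo_project", "version_no": 3}
#   ChatResponse  <- {"session_id": "a1b2c3d4-...", "answer": "Hoş geldiniz! ..."}
#   ChatRequest   -> {"message": "Bakiyemi öğrenmek istiyorum"}  (sent with an X-Session-ID header)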

# ───────────────────────── API ENDPOINTS ───────────────────────── #
async def start_session(req: StartRequest):
    """Create new session"""
    global llm_provider
    try:
        # Validate that the project exists
        project = next((p for p in cfg.projects if p.name == req.project_name and p.enabled), None)
        if not project:
            raise HTTPException(404, f"Project '{req.project_name}' not found or disabled")

        # Find version
        if req.version_no:
            # Specific version requested
            version = next((v for v in project.versions if v.no == req.version_no), None)
            if not version:
                raise HTTPException(404, f"Version {req.version_no} not found for project '{req.project_name}'")
        else:
            # Find the published version with the highest version number
            published_versions = [v for v in project.versions if v.published]
            if not published_versions:
                raise HTTPException(404, f"No published version for project '{req.project_name}'")
            # Sort by version number (no) and take the highest
            version = max(published_versions, key=lambda v: v.no)

        # Create LLM provider if it does not exist yet
        if not llm_provider:
            from llm_factory import LLMFactory
            llm_provider = LLMFactory.create_provider()
            log(f"🤖 LLM Provider created: {type(llm_provider).__name__}")

        # Create session with version config
        session_id = session_store.create(req.project_name, version)
        session = session_store.get(session_id)

        # Process the welcome prompt
        greeting = "Hoş geldiniz! Size nasıl yardımcı olabilirim?"
        if version.welcome_prompt:
            log(f"👋 Processing welcome prompt for session {session_id[:8]}...")
            try:
                # Send the welcome prompt to the LLM
                welcome_result = await llm_provider.generate(
                    prompt=version.welcome_prompt,
                    max_tokens=200,
                    temperature=0.7
                )
                if welcome_result and welcome_result.strip():
                    greeting = welcome_result.strip()
            except Exception as e:
                log(f"⚠️ Welcome prompt processing failed: {e}")
                # Fall back to the default greeting

        session.add_turn("assistant", greeting)
        log(f"✅ Session created for project '{req.project_name}' version {version.no} (highest published)")
        return ChatResponse(session_id=session.session_id, answer=greeting)
    except HTTPException:
        raise
    except Exception as e:
        log(f"❌ Session creation error: {e}")
        raise HTTPException(500, f"Session creation failed: {str(e)}")
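
# Example of the version-selection rule above (hypothetical version list):
# with versions 1 (published), 2 (unpublished) and 3 (published), and no
# version_no in the request, max(published_versions, key=lambda v: v.no)
# picks version 3; an explicit version_no bypasses the published check.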

async def chat(req: ChatRequest, x_session_id: str = Header(...)):
    """Process chat message"""
    try:
        # Get session
        session = session_store.get(x_session_id)
        if not session:
            raise HTTPException(status_code=404, detail="Session not found or expired")

        # Add user message to history
        session.add_message("user", req.message)
        log(f"💬 User [{session.session_id[:8]}...]: {req.message}")

        # Get project and version config
        project = next((p for p in cfg.projects if p.name == session.project_name), None)
        if not project:
            raise HTTPException(status_code=404, detail=f"Project '{session.project_name}' not found")
        version = session.get_version_config()
        if not version:
            raise HTTPException(status_code=400, detail="Version config not found in session")

        # Process based on current state
        if session.state == "idle":
            # Build intent detection prompt
            prompt = build_intent_prompt(version, session.chat_history, project.default_locale)
            raw = await llm_generate(session, prompt, req.message)

            # Check for intent
            intent_name, tail = _safe_intent_parse(raw)
            if intent_name:
                # Find intent config
                intent_config = next((i for i in version.intents if i.name == intent_name), None)
                if intent_config:
                    session.current_intent = intent_name
                    session.intent_config = intent_config
                    session.state = "collect_params"
                    log(f"🎯 Intent detected: {intent_name}")

                    # Check if parameters were already extracted
                    if tail and _extract_parameters_from_response(tail, session, intent_config):
                        log("📦 Some parameters extracted from initial response")

                    # Check which parameters are missing
                    missing_params = [
                        p for p in intent_config.parameters
                        if p.required and p.variable_name not in session.variables
                    ]
                    if not missing_params:
                        # All required parameters collected, execute API
                        response = await _execute_api_call(session, intent_config)
                        session.add_message("assistant", response)
                        return {"response": response, "intent": intent_name, "state": "completed"}
                    else:
                        # Need to collect more parameters
                        param_prompt = build_parameter_prompt(
                            intent_config,
                            session.variables,
                            session.chat_history,
                            project.default_locale
                        )
                        param_question = await llm_generate(session, param_prompt, req.message)
                        clean_question = _trim_response(param_question)
                        session.add_message("assistant", clean_question)
                        return {"response": clean_question, "intent": intent_name, "state": "collecting_params"}
                else:
                    log(f"⚠️ Unknown intent: {intent_name}")

            # No intent detected, return general response
            clean_response = _trim_response(raw)
            session.add_message("assistant", clean_response)
            return {"response": clean_response, "state": "idle"}

        elif session.state == "collect_params":
            # Continue parameter collection
            intent_config = session.intent_config

            # Try to extract parameters from the user message
            param_prompt = f"""
Extract parameters from user message: "{req.message}"

Expected parameters:
{json.dumps([{
    'name': p.name,
    'type': p.type,
    'required': p.required,
    'extraction_prompt': p.extraction_prompt
} for p in intent_config.parameters if p.variable_name not in session.variables], ensure_ascii=False)}

Return as JSON object with parameter names as keys.
"""
            raw = await llm_generate(session, param_prompt, req.message)
            _extract_parameters_from_response(raw, session, intent_config)

            # Check which parameters are still missing
            missing_params = [
                p for p in intent_config.parameters
                if p.required and p.variable_name not in session.variables
            ]
            if not missing_params:
                # All parameters collected, execute API
                response = await _execute_api_call(session, intent_config)
                session.add_message("assistant", response)
                return {"response": response, "intent": session.current_intent, "state": "completed"}
            else:
                # Still need more parameters
                param_prompt = build_parameter_prompt(
                    intent_config,
                    session.variables,
                    session.chat_history,
                    project.default_locale
                )
                param_question = await llm_generate(session, param_prompt, req.message)
                clean_question = _trim_response(param_question)
                session.add_message("assistant", clean_question)
                return {"response": clean_question, "intent": session.current_intent, "state": "collecting_params"}

        else:
            # Unknown state, reset
            session.reset_flow()
            return {"response": "Bir hata oluştu, lütfen tekrar deneyin.", "state": "error"}

    except HTTPException:
        raise
    except Exception as e:
        log(f"❌ Chat error: {e}")
        import traceback
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=str(e))
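
# State flow implemented by chat() above:
#   idle -> (intent detected) collect_params -> (all required params filled)
#   call_api -> humanize -> reset_flow() back to idle.
# Unknown states reset the flow and return a generic error message.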

async def handle_new_message(session: Session, user_input: str) -> str:
    """Handle new message (not parameter followup) - for WebSocket"""
    try:
        # Get version config from session
        version = session.get_version_config()
        if not version:
            log("❌ Version config not found")
            return "Bir hata oluştu. Lütfen tekrar deneyin."

        # Get project config
        project = next((p for p in cfg.projects if p.name == session.project_name), None)
        if not project:
            return "Proje konfigürasyonu bulunamadı."

        # Build intent detection prompt
        prompt = build_intent_prompt(version, session.chat_history, project.default_locale)

        # Get LLM response
        raw = await llm_generate(session, prompt, user_input)

        # Empty response fallback
        if not raw:
            log("⚠️ Empty response from LLM")
            return "Üzgünüm, mesajınızı anlayamadım. Lütfen tekrar dener misiniz?"

        # Check for intent
        intent_name, tail = _safe_intent_parse(raw)
        if intent_name:
            # Find intent config
            intent_config = next((i for i in version.intents if i.name == intent_name), None)
            if intent_config:
                session.current_intent = intent_name
                session.intent_config = intent_config
                session.state = "collect_params"
                log(f"🎯 Intent detected: {intent_name}")

                # Check if parameters were already extracted
                if tail and _extract_parameters_from_response(tail, session, intent_config):
                    log("📦 Some parameters extracted from initial response")

                # Check which parameters are missing
                missing_params = [
                    p for p in intent_config.parameters
                    if p.required and p.variable_name not in session.variables
                ]
                if not missing_params:
                    # All required parameters collected, execute API
                    return await _execute_api_call(session, intent_config)
                else:
                    # Need to collect more parameters
                    param_prompt = build_parameter_prompt(
                        intent_config,
                        session.variables,
                        session.chat_history,
                        project.default_locale
                    )
                    param_question = await llm_generate(session, param_prompt, user_input)
                    return _trim_response(param_question)

        # No intent detected, return general response
        return _trim_response(raw)
    except Exception as e:
        log(f"❌ Error in handle_new_message: {e}")
        return "Bir hata oluştu. Lütfen tekrar deneyin."

async def handle_parameter_followup(session: Session, user_input: str) -> str:
    """Handle parameter collection followup - for WebSocket"""
    try:
        if not session.intent_config:
            log("⚠️ No intent config in session")
            session.reset_flow()
            return "Üzgünüm, hangi işlem için bilgi istediğimi unuttum. Baştan başlayalım."

        intent_config = session.intent_config

        # Get project config
        project = next((p for p in cfg.projects if p.name == session.project_name), None)
        if not project:
            return "Proje konfigürasyonu bulunamadı."

        # Try to extract parameters from the user message
        param_prompt = f"""
Extract parameters from user message: "{user_input}"

Expected parameters:
{json.dumps([{
    'name': p.name,
    'type': p.type,
    'required': p.required,
    'extraction_prompt': p.extraction_prompt
} for p in intent_config.parameters if p.variable_name not in session.variables], ensure_ascii=False)}

Return as JSON object with parameter names as keys.
"""
        raw = await llm_generate(session, param_prompt, user_input)
        _extract_parameters_from_response(raw, session, intent_config)

        # Check which parameters are still missing
        missing_params = [
            p for p in intent_config.parameters
            if p.required and p.variable_name not in session.variables
        ]
        if not missing_params:
            # All parameters collected, execute API
            return await _execute_api_call(session, intent_config)
        else:
            # Still need more parameters
            param_prompt = build_parameter_prompt(
                intent_config,
                session.variables,
                session.chat_history,
                project.default_locale
            )
            param_question = await llm_generate(session, param_prompt, user_input)
            return _trim_response(param_question)
    except Exception as e:
        log(f"❌ Error in handle_parameter_followup: {e}")
        session.reset_flow()
        return "Bir hata oluştu. Lütfen tekrar deneyin."

# Initialize LLM on module load
setup_llm_provider()