"""Admin API endpoints for Flare (Refactored) | |
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
Provides authentication, project, version, and API management endpoints with provider support. | |
""" | |
import os | |
import time | |
import threading | |
import hashlib | |
import bcrypt | |
from typing import Optional, Dict, List, Any | |
from datetime import datetime, timedelta, timezone | |
from fastapi import APIRouter, HTTPException, Depends, Query, Response, Body | |
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials | |
from pydantic import BaseModel, Field | |
import httpx | |
from utils import verify_token | |
from config_provider import ConfigProvider | |
from logger import log_info, log_error, log_warning, log_debug | |
from exceptions import ( | |
RaceConditionError, ValidationError, ResourceNotFoundError, | |
AuthenticationError, AuthorizationError | |
) | |
# ===================== Constants & Config =====================
security = HTTPBearer()
router = APIRouter(tags=["admin"])
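
# NOTE (assumption): the handler functions below carry no route decorators, so the
# actual wiring onto `router` is presumably done elsewhere (e.g. an app factory or
# main.py). A minimal, hypothetical sketch of that registration using FastAPI's
# add_api_route — the paths and the /api/admin prefix are illustrative, not confirmed:
#
#   router.add_api_route("/login", login, methods=["POST"], response_model=LoginResponse)
#   router.add_api_route("/change-password", change_password, methods=["POST"])
#   router.add_api_route("/projects", list_projects, methods=["GET"])
#   app.include_router(router, prefix="/api/admin")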
# ===================== Models =====================
class LoginRequest(BaseModel):
    username: str
    password: str

class LoginResponse(BaseModel):
    token: str
    username: str

class ChangePasswordRequest(BaseModel):
    current_password: str
    new_password: str

class ProviderSettingsUpdate(BaseModel):
    name: str
    api_key: Optional[str] = None
    endpoint: Optional[str] = None
    settings: Dict[str, Any] = Field(default_factory=dict)

class EnvironmentUpdate(BaseModel):
    llm_provider: ProviderSettingsUpdate
    tts_provider: ProviderSettingsUpdate
    stt_provider: ProviderSettingsUpdate
    parameter_collection_config: Optional[Dict[str, Any]] = None

class ProjectCreate(BaseModel):
    name: str
    caption: Optional[str] = ""
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_locale: str = "tr"
    supported_locales: List[str] = Field(default_factory=lambda: ["tr"])
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"

class ProjectUpdate(BaseModel):
    caption: str
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_locale: str = "tr"
    supported_locales: List[str] = Field(default_factory=lambda: ["tr"])
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"
    last_update_date: str

class VersionCreate(BaseModel):
    caption: str
    source_version_no: int | None = None

class IntentModel(BaseModel):
    name: str
    caption: Optional[str] = ""
    detection_prompt: str
    examples: List[Dict[str, str]] = []  # LocalizedExample format
    parameters: List[Dict[str, Any]] = []
    action: str
    fallback_timeout_prompt: Optional[str] = None
    fallback_error_prompt: Optional[str] = None

class VersionUpdate(BaseModel):
    caption: str
    general_prompt: str
    llm: Dict[str, Any]
    intents: List[IntentModel]
    last_update_date: str

class APICreate(BaseModel):
    name: str
    url: str
    method: str = "POST"
    headers: Dict[str, str] = {}
    body_template: Dict[str, Any] = {}
    timeout_seconds: int = 10
    retry: Dict[str, Any] = Field(default_factory=lambda: {"retry_count": 3, "backoff_seconds": 2, "strategy": "static"})
    proxy: Optional[str] = None
    auth: Optional[Dict[str, Any]] = None
    response_prompt: Optional[str] = None
    response_mappings: List[Dict[str, Any]] = []

class APIUpdate(BaseModel):
    url: str
    method: str
    headers: Dict[str, str]
    body_template: Dict[str, Any]
    timeout_seconds: int
    retry: Dict[str, Any]
    proxy: Optional[str]
    auth: Optional[Dict[str, Any]]
    response_prompt: Optional[str]
    response_mappings: List[Dict[str, Any]] = []
    last_update_date: str

class TestRequest(BaseModel):
    test_type: str  # "all", "ui", "backend", "integration", "spark"

# ===================== Auth Endpoints =====================
async def login(request: LoginRequest):
    """User login endpoint"""
    cfg = ConfigProvider.get()

    # Find user
    user = next((u for u in cfg.global_config.users if u.username == request.username), None)
    if not user:
        raise HTTPException(status_code=401, detail="Invalid credentials")

    # Verify password - try both bcrypt and SHA256 for backward compatibility
    password_valid = False

    # First try bcrypt (new format)
    try:
        if user.password_hash.startswith("$2b$") or user.password_hash.startswith("$2a$"):
            password_valid = bcrypt.checkpw(request.password.encode('utf-8'), user.password_hash.encode('utf-8'))
    except Exception:
        pass

    # If not valid, try SHA256 (old format)
    if not password_valid:
        sha256_hash = hashlib.sha256(request.password.encode('utf-8')).hexdigest()
        password_valid = (user.password_hash == sha256_hash)

    if not password_valid:
        raise HTTPException(status_code=401, detail="Invalid credentials")

    # Create token
    token = create_token(request.username)
    log_info(f"✅ User '{request.username}' logged in successfully")

    return LoginResponse(token=token, username=request.username)

async def change_password(
    request: ChangePasswordRequest,
    username: str = Depends(verify_token)
):
    """Change user password"""
    cfg = ConfigProvider.get()

    # Find user
    user = next((u for u in cfg.global_config.users if u.username == username), None)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    # Verify current password
    if not bcrypt.checkpw(request.current_password.encode('utf-8'), user.password_hash.encode('utf-8')):
        raise HTTPException(status_code=401, detail="Current password is incorrect")

    # Generate new password hash
    salt = bcrypt.gensalt()
    new_hash = bcrypt.hashpw(request.new_password.encode('utf-8'), salt)

    # Update user
    user.password_hash = new_hash.decode('utf-8')
    user.salt = salt.decode('utf-8')

    # Save configuration
    cfg.save()

    log_info(f"✅ Password changed for user '{username}'")
    return {"success": True}

# ===================== Locales Endpoints =====================
async def get_available_locales(username: str = Depends(verify_token)):
    """Get all system-supported locales"""
    from locale_manager import LocaleManager

    locales = LocaleManager.get_available_locales_with_names()
    return {
        "locales": locales,
        "default": LocaleManager.get_default_locale()
    }

async def get_locale_details(
    locale_code: str,
    username: str = Depends(verify_token)
):
    """Get detailed information for a specific locale"""
    from locale_manager import LocaleManager

    locale_info = LocaleManager.get_locale_details(locale_code)
    if not locale_info:
        raise HTTPException(status_code=404, detail=f"Locale '{locale_code}' not found")

    return locale_info

# ===================== Environment Endpoints =====================
async def get_environment(username: str = Depends(verify_token)):
    """Get environment configuration with provider info"""
    cfg = ConfigProvider.get()
    env_config = cfg.global_config

    # Support the new provider-based structure
    response = {}

    # LLM provider - convert from the legacy structure if needed
    if hasattr(env_config, 'llm_provider'):
        response["llm_provider"] = env_config.llm_provider.model_dump()
    else:
        # Build provider info from the legacy fields
        response["llm_provider"] = {
            "name": env_config.work_mode if env_config.work_mode in ["gpt4o", "gpt4o-mini"] else "spark_cloud",
            "api_key": env_config.cloud_token or "",
            "endpoint": str(env_config.spark_endpoint),
            "settings": {
                "internal_prompt": getattr(env_config, 'internal_prompt', None)
            }
        }

    # TTS provider - convert from the legacy structure if needed
    if hasattr(env_config, 'tts_provider'):
        response["tts_provider"] = env_config.tts_provider.model_dump()
    else:
        response["tts_provider"] = {
            "name": getattr(env_config, 'tts_engine', 'no_tts'),
            "api_key": getattr(env_config, 'tts_engine_api_key', None) or "",
            "endpoint": None,
            "settings": getattr(env_config, 'tts_settings', {})
        }

    # STT provider - convert from the legacy structure if needed
    if hasattr(env_config, 'stt_provider'):
        response["stt_provider"] = env_config.stt_provider.model_dump()
    else:
        response["stt_provider"] = {
            "name": getattr(env_config, 'stt_engine', 'no_stt'),
            "api_key": getattr(env_config, 'stt_engine_api_key', None) or "",
            "endpoint": None,
            "settings": getattr(env_config, 'stt_settings', {})
        }

    # Provider list
    if hasattr(env_config, 'providers'):
        response["providers"] = [p.model_dump() for p in env_config.providers]
    else:
        # Default provider list
        response["providers"] = [
            {
                "type": "llm",
                "name": "spark_cloud",
                "display_name": "Spark LLM (Cloud)",
                "requires_endpoint": True,
                "requires_api_key": True,
                "requires_repo_info": False
            },
            {
                "type": "llm",
                "name": "gpt4o",
                "display_name": "GPT-4o",
                "requires_endpoint": True,
                "requires_api_key": True,
                "requires_repo_info": False
            },
            {
                "type": "llm",
                "name": "gpt4o-mini",
                "display_name": "GPT-4o Mini",
                "requires_endpoint": True,
                "requires_api_key": True,
                "requires_repo_info": False
            },
            {
                "type": "tts",
                "name": "no_tts",
                "display_name": "No TTS",
                "requires_endpoint": False,
                "requires_api_key": False,
                "requires_repo_info": False
            },
            {
                "type": "tts",
                "name": "elevenlabs",
                "display_name": "ElevenLabs",
                "requires_endpoint": False,
                "requires_api_key": True,
                "requires_repo_info": False
            },
            {
                "type": "stt",
                "name": "no_stt",
                "display_name": "No STT",
                "requires_endpoint": False,
                "requires_api_key": False,
                "requires_repo_info": False
            },
            {
                "type": "stt",
                "name": "google",
                "display_name": "Google Cloud STT",
                "requires_endpoint": False,
                "requires_api_key": True,
                "requires_repo_info": False
            }
        ]

    # Parameter collection config
    if hasattr(env_config, 'parameter_collection_config'):
        response["parameter_collection_config"] = env_config.parameter_collection_config.model_dump()
    else:
        # Default values
        response["parameter_collection_config"] = {
            "max_params_per_question": 2,
            "retry_unanswered": True,
            "smart_grouping": True,
            "collection_prompt": "You are a helpful assistant collecting information from the user..."
        }

    return response

async def update_environment(
    update: EnvironmentUpdate,
    username: str = Depends(verify_token)
):
    """Update environment configuration with provider validation"""
    log_info(f"📝 Updating environment config by {username}")
    cfg = ConfigProvider.get()

    # Validate LLM provider
    llm_provider_def = cfg.global_config.get_provider_config("llm", update.llm_provider.name)
    if not llm_provider_def:
        raise HTTPException(status_code=400, detail=f"Unknown LLM provider: {update.llm_provider.name}")
    if llm_provider_def.requires_api_key and not update.llm_provider.api_key:
        raise HTTPException(status_code=400, detail=f"{llm_provider_def.display_name} requires API key")
    if llm_provider_def.requires_endpoint and not update.llm_provider.endpoint:
        raise HTTPException(status_code=400, detail=f"{llm_provider_def.display_name} requires endpoint")

    # Validate TTS provider
    tts_provider_def = cfg.global_config.get_provider_config("tts", update.tts_provider.name)
    if not tts_provider_def:
        raise HTTPException(status_code=400, detail=f"Unknown TTS provider: {update.tts_provider.name}")
    if tts_provider_def.requires_api_key and not update.tts_provider.api_key:
        raise HTTPException(status_code=400, detail=f"{tts_provider_def.display_name} requires API key")

    # Validate STT provider
    stt_provider_def = cfg.global_config.get_provider_config("stt", update.stt_provider.name)
    if not stt_provider_def:
        raise HTTPException(status_code=400, detail=f"Unknown STT provider: {update.stt_provider.name}")
    if stt_provider_def.requires_api_key and not update.stt_provider.api_key:
        raise HTTPException(status_code=400, detail=f"{stt_provider_def.display_name} requires API key")

    # Update via ConfigProvider
    ConfigProvider.update_environment(update.model_dump(), username)

    log_info(f"✅ Environment updated to LLM: {update.llm_provider.name}, TTS: {update.tts_provider.name}, STT: {update.stt_provider.name} by {username}")
    return {"success": True}

# ===================== Project Endpoints =====================
def list_enabled_projects():
    """Get list of enabled project names for chat"""
    cfg = ConfigProvider.get()
    return [p.name for p in cfg.projects if p.enabled and not getattr(p, 'deleted', False)]

async def list_projects(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all projects"""
    cfg = ConfigProvider.get()
    projects = cfg.projects

    # Filter deleted if needed
    if not include_deleted:
        projects = [p for p in projects if not getattr(p, 'deleted', False)]

    return [p.model_dump() for p in projects]

async def get_project(
    project_id: int,
    username: str = Depends(verify_token)
):
    """Get single project by ID"""
    project = ConfigProvider.get_project(project_id)
    if not project or getattr(project, 'deleted', False):
        raise HTTPException(status_code=404, detail="Project not found")
    return project.model_dump()

async def create_project(
    project: ProjectCreate,
    username: str = Depends(verify_token)
):
    """Create new project with initial version"""
    # Validate supported locales
    from locale_manager import LocaleManager

    invalid_locales = LocaleManager.validate_project_languages(project.supported_locales)
    if invalid_locales:
        available_locales = LocaleManager.get_available_locales_with_names()
        available_codes = [locale['code'] for locale in available_locales]
        raise HTTPException(
            status_code=400,
            detail=f"Unsupported locales: {', '.join(invalid_locales)}. Available locales: {', '.join(available_codes)}"
        )

    # Check if default locale is in supported locales
    if project.default_locale not in project.supported_locales:
        raise HTTPException(
            status_code=400,
            detail="Default locale must be one of the supported locales"
        )

    # Create project via ConfigProvider
    new_project = ConfigProvider.create_project(project.model_dump(), username)

    log_info(f"✅ Project '{project.name}' created by {username}")
    return new_project.model_dump()

async def update_project(
    project_id: int,
    update: ProjectUpdate,
    username: str = Depends(verify_token)
):
    """Update existing project with race condition handling"""
    try:
        # Optimistic locking check
        result = ConfigProvider.update_project(
            project_id,
            update.model_dump(),
            username,
            expected_last_update=update.last_update_date
        )
        log_info(f"✅ Project {project_id} updated by {username}")
        return result
    except RaceConditionError as e:
        log_warning(f"⚠️ Race condition detected for project {project_id}")
        raise HTTPException(
            status_code=409,
            detail={
                "message": e.message,
                "last_update_user": e.last_update_user,
                "last_update_date": e.last_update_date,
                "type": "race_condition"
            }
        )
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="Project not found")
    except Exception as e:
        log_error(f"❌ Error updating project {project_id}", e)
        raise HTTPException(status_code=500, detail=str(e))

async def delete_project(project_id: int, username: str = Depends(verify_token)):
    """Delete project (soft delete)"""
    ConfigProvider.delete_project(project_id, username)
    log_info(f"✅ Project deleted by {username}")
    return {"success": True}

async def toggle_project(project_id: int, username: str = Depends(verify_token)):
    """Toggle project enabled status"""
    enabled = ConfigProvider.toggle_project(project_id, username)
    log_info(f"✅ Project {'enabled' if enabled else 'disabled'} by {username}")
    return {"enabled": enabled}

# ===================== Version Endpoints =====================
async def list_versions(
    project_id: int,
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List project versions"""
    project = ConfigProvider.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    versions = project.versions

    # Filter deleted if needed
    if not include_deleted:
        versions = [v for v in versions if not getattr(v, 'deleted', False)]

    return [v.model_dump() for v in versions]

async def create_version(
    project_id: int,
    version_data: VersionCreate,
    username: str = Depends(verify_token)
):
    """Create new version"""
    new_version = ConfigProvider.create_version(project_id, version_data.model_dump(), username)
    log_info(f"✅ Version created for project {project_id} by {username}")
    return new_version.model_dump()

async def update_version(
    project_id: int,
    version_no: int,
    update: VersionUpdate,
    force: bool = Query(default=False, description="Force update despite conflicts"),
    username: str = Depends(verify_token)
):
    """Update version with race condition handling"""
    try:
        # Check the force parameter
        if force:
            log_warning(f"⚠️ Force update requested for version {version_no} by {username}")

        result = ConfigProvider.update_version(
            project_id,
            version_no,
            update.model_dump(),
            username,
            expected_last_update=update.last_update_date if not force else None
        )
        log_info(f"✅ Version {version_no} updated by {username}")
        return result
    except RaceConditionError as e:
        if force:
            # In force mode, ignore the race condition and retry without the check
            result = ConfigProvider.update_version(
                project_id,
                version_no,
                update.model_dump(),
                username,
                expected_last_update=None
            )
            return result
        else:
            log_warning(f"⚠️ Race condition detected for version {version_no}")
            raise HTTPException(
                status_code=409,
                detail={
                    "message": e.message,
                    "last_update_user": e.last_update_user,
                    "last_update_date": e.last_update_date,
                    "type": "race_condition"
                }
            )
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="Version not found")
    except Exception as e:
        log_error(f"❌ Error updating version {version_no}", e)
        raise HTTPException(status_code=500, detail=str(e))

async def publish_version(
    project_id: int,
    version_no: int,
    username: str = Depends(verify_token)
):
    """Publish version"""
    project, version = ConfigProvider.publish_version(project_id, version_no, username)

    log_info(f"✅ Version {version_no} published for project '{project.name}' by {username}")

    # Notify LLM provider if project is enabled and provider requires repo info
    cfg = ConfigProvider.get()
    llm_provider_def = cfg.global_config.get_provider_config("llm", cfg.global_config.llm_provider.name)
    if project.enabled and llm_provider_def and llm_provider_def.requires_repo_info:
        try:
            await notify_llm_startup(project, version)
        except Exception as e:
            log_error("⚠️ Failed to notify LLM provider", e)
            # Don't fail the publish

    return {"success": True}

async def delete_version(
    project_id: int,
    version_no: int,
    username: str = Depends(verify_token)
):
    """Delete version (soft delete)"""
    ConfigProvider.delete_version(project_id, version_no, username)
    log_info(f"✅ Version {version_no} deleted for project {project_id} by {username}")
    return {"success": True}

async def get_project_versions(
    project_name: str,
    username: str = Depends(verify_token)
):
    """Get all versions of a project for testing"""
    cfg = ConfigProvider.get()

    # Find project
    project = next((p for p in cfg.projects if p.name == project_name), None)
    if not project:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found")

    # Return versions with their status
    versions = []
    for v in project.versions:
        if not getattr(v, 'deleted', False):
            versions.append({
                "version_number": v.no,
                "caption": v.caption,
                "published": v.published,
                "description": getattr(v, 'description', ''),
                "intent_count": len(v.intents),
                "created_date": getattr(v, 'created_date', None),
                "is_current": v.published  # Published version is current
            })

    return {
        "project_name": project_name,
        "project_caption": project.caption,
        "versions": versions
    }

async def compare_versions(
    project_id: int,
    version1_no: int,
    version2_no: int,
    username: str = Depends(verify_token)
):
    """Compare two versions and return differences"""
    project = ConfigProvider.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    v1 = next((v for v in project.versions if v.no == version1_no), None)
    v2 = next((v for v in project.versions if v.no == version2_no), None)

    if not v1 or not v2:
        raise HTTPException(status_code=404, detail="Version not found")

    # Deep comparison
    differences = {
        'general_prompt': {
            'changed': v1.general_prompt != v2.general_prompt,
            'v1': v1.general_prompt,
            'v2': v2.general_prompt
        },
        'intents': {
            'added': [],
            'removed': [],
            'modified': []
        }
    }

    # Compare intents
    v1_intents = {i.name: i for i in v1.intents}
    v2_intents = {i.name: i for i in v2.intents}

    # Find added/removed
    differences['intents']['added'] = list(set(v2_intents.keys()) - set(v1_intents.keys()))
    differences['intents']['removed'] = list(set(v1_intents.keys()) - set(v2_intents.keys()))

    # Find modified (compare_intent_details is assumed to be provided elsewhere in this package)
    for intent_name in set(v1_intents.keys()) & set(v2_intents.keys()):
        i1, i2 = v1_intents[intent_name], v2_intents[intent_name]
        if i1.model_dump() != i2.model_dump():
            differences['intents']['modified'].append({
                'name': intent_name,
                'differences': compare_intent_details(i1, i2)
            })

    log_info(
        "Version comparison performed",
        user=username,
        project_id=project_id,
        version1_no=version1_no,
        version2_no=version2_no
    )

    return differences

# ===================== API Endpoints =====================
async def list_apis(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all APIs"""
    cfg = ConfigProvider.get()
    apis = cfg.apis

    # Filter deleted if needed
    if not include_deleted:
        apis = [a for a in apis if not getattr(a, 'deleted', False)]

    return [a.model_dump() for a in apis]

async def create_api(api: APICreate, username: str = Depends(verify_token)):
    """Create new API"""
    new_api = ConfigProvider.create_api(api.model_dump(), username)
    log_info(f"✅ API '{api.name}' created by {username}")
    return new_api.model_dump()

async def update_api(
    api_name: str,
    update: APIUpdate,
    username: str = Depends(verify_token)
):
    """Update API configuration with race condition handling"""
    try:
        result = ConfigProvider.update_api(
            api_name,
            update.model_dump(),
            username,
            expected_last_update=update.last_update_date
        )
        log_info(f"✅ API '{api_name}' updated by {username}")
        return result
    except RaceConditionError as e:
        log_warning(f"⚠️ Race condition detected for API '{api_name}'")
        raise HTTPException(
            status_code=409,
            detail={
                "message": e.message,
                "last_update_user": e.last_update_user,
                "last_update_date": e.last_update_date,
                "type": "race_condition"
            }
        )
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="API not found")
    except Exception as e:
        log_error(f"❌ Error updating API '{api_name}'", e)
        raise HTTPException(status_code=500, detail=str(e))

async def delete_api(api_name: str, username: str = Depends(verify_token)):
    """Delete API (soft delete)"""
    ConfigProvider.delete_api(api_name, username)
    log_info(f"✅ API '{api_name}' deleted by {username}")
    return {"success": True}

async def validate_regex(
    request: dict = Body(...),
    username: str = Depends(verify_token)
):
    """Validate regex pattern"""
    import re

    pattern = request.get("pattern", "")
    test_value = request.get("test_value", "")

    try:
        compiled_regex = re.compile(pattern)
        matches = bool(compiled_regex.match(test_value))
        return {
            "valid": True,
            "matches": matches,
            "pattern": pattern,
            "test_value": test_value
        }
    except Exception as e:
        return {
            "valid": False,
            "matches": False,
            "error": str(e),
            "pattern": pattern,
            "test_value": test_value
        }

# ===================== Test Endpoints =====================
async def run_all_tests(
    request: TestRequest,
    username: str = Depends(verify_token)
):
    """Run all tests"""
    log_info(f"🧪 Running {request.test_type} tests requested by {username}")

    # TODO: Implement test runner
    # For now, return mock results
    return {
        "test_run_id": "test_" + datetime.now().isoformat(),
        "status": "running",
        "total_tests": 60,
        "completed": 0,
        "passed": 0,
        "failed": 0,
        "message": "Test run started"
    }

async def get_test_status(
    test_run_id: str,
    username: str = Depends(verify_token)
):
    """Get test run status"""
    # TODO: Implement test status tracking
    return {
        "test_run_id": test_run_id,
        "status": "completed",
        "total_tests": 60,
        "completed": 60,
        "passed": 57,
        "failed": 3,
        "duration": 340.5,
        "details": []
    }

# ===================== Activity Log =====================
async def get_activity_log(
    limit: int = Query(100, ge=1, le=1000),
    entity_type: Optional[str] = None,
    username: str = Depends(verify_token)
):
    """Get activity log"""
    cfg = ConfigProvider.get()
    logs = cfg.activity_log

    # Filter by entity type if specified
    if entity_type:
        logs = [l for l in logs if l.entity_type == entity_type]

    # Return most recent entries
    return logs[-limit:]

# ===================== TTS Endpoints =====================
class TTSRequest(BaseModel):
    text: str
    voice_id: Optional[str] = None
    language: Optional[str] = "tr-TR"

async def generate_tts(
    request: TTSRequest,
    username: str = Depends(verify_token)
):
    """Generate TTS audio from text"""
    from tts_factory import TTSFactory
    from tts_preprocessor import TTSPreprocessor
    import base64

    try:
        # Create TTS provider
        tts_provider = TTSFactory.create_provider()
        if not tts_provider:
            # Return empty response for no TTS
            return Response(
                content=b"",
                media_type="audio/mpeg",
                headers={"X-TTS-Status": "disabled"}
            )

        log_info(f"🎤 TTS request: '{request.text[:50]}...' with provider: {tts_provider.get_provider_name()}")

        # Preprocess text if needed
        preprocessor = TTSPreprocessor(language=request.language)
        processed_text = preprocessor.preprocess(
            request.text,
            tts_provider.get_preprocessing_flags()
        )

        # Generate audio
        audio_data = await tts_provider.synthesize(
            text=processed_text,
            voice_id=request.voice_id
        )

        # Return audio as binary response
        return Response(
            content=audio_data,
            media_type="audio/mpeg",
            headers={
                "Content-Disposition": 'inline; filename="tts_output.mp3"',
                "X-TTS-Provider": tts_provider.get_provider_name()
            }
        )
    except Exception as e:
        log_error("❌ TTS generation error", e)
        raise HTTPException(status_code=500, detail=str(e))

async def get_tts_voices(username: str = Depends(verify_token)):
    """Get available TTS voices"""
    from tts_factory import TTSFactory

    try:
        tts_provider = TTSFactory.create_provider()
        if not tts_provider:
            return {"voices": []}

        voices = tts_provider.get_supported_voices()

        # Convert dict to list format
        voice_list = [
            {"id": voice_id, "name": voice_name}
            for voice_id, voice_name in voices.items()
        ]

        return {"voices": voice_list}
    except Exception as e:
        log_error("❌ Error getting TTS voices", e)
        return {"voices": []}

# ===================== Helper Functions =====================
async def notify_llm_startup(project, version):
    """Notify LLM provider about project startup"""
    from llm_factory import LLMFactory

    try:
        llm_provider = LLMFactory.create_provider()

        # Build project config for startup
        project_config = {
            "name": project.name,
            "version_no": version.no,
            "repo_id": version.llm.repo_id,
            "generation_config": version.llm.generation_config,
            "use_fine_tune": version.llm.use_fine_tune,
            "fine_tune_zip": version.llm.fine_tune_zip
        }

        success = await llm_provider.startup(project_config)
        if success:
            log_info(f"✅ LLM provider notified for project '{project.name}'")
        else:
            log_info(f"⚠️ LLM provider notification failed for project '{project.name}'")
    except Exception as e:
        log_error("❌ Error notifying LLM provider", e)
        raise

def create_token(username: str) -> str:
    """Create JWT token with secure random"""
    import secrets

    cfg = ConfigProvider.get()

    # Add a secure random jti (JWT ID) to the token
    jti = secrets.token_urlsafe(16)

    payload = {
        "sub": username,
        "exp": datetime.now(timezone.utc) + timedelta(hours=24),
        "iat": datetime.now(timezone.utc),
        "jti": jti  # Unique token ID
    }

    # Store token ID for revocation if needed
    if not hasattr(cfg, '_active_tokens'):
        cfg._active_tokens = set()
    cfg._active_tokens.add(jti)

    secret = os.environ.get("JWT_SECRET", "your-secret-key")
    token = jwt.encode(payload, secret, algorithm="HS256")
    if isinstance(token, bytes):
        token = token.decode()
    return token
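
# NOTE (assumption): verify_token is imported from utils and not shown in this module.
# Since create_token() records each jti in cfg._active_tokens, the verifier plausibly
# decodes the bearer token with the same secret and returns the username, roughly like:
#
#   def verify_token(credentials: HTTPAuthorizationCredentials = Depends(security)) -> str:
#       payload = jwt.decode(
#           credentials.credentials,
#           os.environ.get("JWT_SECRET", "your-secret-key"),
#           algorithms=["HS256"],
#       )
#       return payload["sub"]
#
# This is only a sketch of what utils.verify_token might do, not its actual implementation.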
# ===================== Cleanup Task =====================
def cleanup_activity_log():
    """Cleanup old activity log entries"""
    while True:
        try:
            cfg = ConfigProvider.get()

            # Keep only last 30 days
            cutoff = datetime.now() - timedelta(days=30)
            cutoff_str = cutoff.isoformat()

            original_count = len(cfg.activity_log)
            cfg.activity_log = [
                log for log in cfg.activity_log
                if hasattr(log, 'timestamp') and str(log.timestamp) >= cutoff_str
            ]

            if len(cfg.activity_log) < original_count:
                removed = original_count - len(cfg.activity_log)
                log_info(f"🧹 Cleaned up {removed} old activity log entries")

                # Persist the trimmed log via ConfigProvider.save(cfg, "system")
                ConfigProvider.save(cfg, "system")
        except Exception as e:
            log_error("❌ Activity log cleanup error", e)

        # Run every hour
        time.sleep(3600)

def start_cleanup_task():
    """Start the cleanup task in background"""
    thread = threading.Thread(target=cleanup_activity_log, daemon=True)
    thread.start()
    log_info("🧹 Activity log cleanup task started")