"""Admin API endpoints for Flare
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides authentication, project, version, and API management endpoints.
"""
import hashlib
import hmac
import json
import os
import sys
import threading
import time
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Optional, List, Dict, Any

import bcrypt
import httpx
import jwt
from fastapi import APIRouter, HTTPException, Depends, Body, Query
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from pydantic import BaseModel, Field

from utils import log
from config_provider import ConfigProvider
from encryption_utils import encrypt, decrypt
# ===================== JWT Config ===================== | |
def get_jwt_config():
    """Build the JWT settings (secret, algorithm, expiration) for the current mode.

    In ``hfcloud`` mode the secret must be injected via environment secrets;
    on-premise deployments load a local ``.env`` file first. A hard-coded
    fallback secret is used only when JWT_SECRET is absent.
    """
    fallback_secret = "flare-admin-secret-key-change-in-production"
    mode = os.getenv("WORK_MODE", "on-premise")
    if mode == "hfcloud":
        # Cloud mode - secrets come from the hosting environment
        secret = os.getenv("JWT_SECRET")
        if not secret:
            log("⚠️ WARNING: JWT_SECRET not found in environment, using fallback")
            secret = fallback_secret
    else:
        # On-premise mode - pull configuration from a .env file
        from dotenv import load_dotenv
        load_dotenv()
        secret = os.getenv("JWT_SECRET", fallback_secret)
    return {
        "secret": secret,
        "algorithm": os.getenv("JWT_ALGORITHM", "HS256"),
        "expiration_hours": int(os.getenv("JWT_EXPIRATION_HOURS", "24")),
    }
# ===================== Constants & Config ===================== | |
# All admin endpoints registered below are mounted under the /api prefix.
router = APIRouter(prefix="/api")
# HTTP Bearer scheme consumed by verify_token() for JWT auth.
security = HTTPBearer()
# ===================== Models ===================== | |
class LoginRequest(BaseModel):
    # Plain-text credentials submitted to the login endpoint.
    username: str
    password: str
class LoginResponse(BaseModel):
    # Successful login result: signed JWT plus the echoed username.
    token: str
    username: str
class ChangePasswordRequest(BaseModel):
    # Both passwords arrive in plain text; hashing happens server-side.
    current_password: str
    new_password: str
class EnvironmentUpdate(BaseModel):
    # Payload for updating the global environment configuration.
    # Validation rules (token formats, engine names) are enforced by
    # update_environment(), not by the model itself.
    work_mode: str
    cloud_token: Optional[str] = None
    spark_endpoint: str
    internal_prompt: Optional[str] = None
    tts_engine: str = "no_tts"
    tts_engine_api_key: Optional[str] = None
    tts_settings: Optional[Dict[str, Any]] = None
    stt_engine: str = "no_stt"
    stt_engine_api_key: Optional[str] = None
    stt_settings: Optional[Dict[str, Any]] = None
    parameter_collection_config: Optional[Dict[str, Any]] = None
class ProjectCreate(BaseModel):
    # Payload for creating a new project.
    name: str
    caption: Optional[str] = ""
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_language: str = "Türkçe"  # Locale display name (e.g. Türkçe, English)
    supported_languages: List[str] = Field(default_factory=lambda: ["tr-TR"])  # Locale codes
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"
class ProjectUpdate(BaseModel):
    # Payload for updating an existing project; `name` is immutable and absent.
    caption: str
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_language: str = "Türkçe"  # Locale display name
    supported_languages: List[str] = Field(default_factory=lambda: ["tr-TR"])  # Locale codes
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"
    # Client-supplied timestamp, presumably for optimistic-lock checks — TODO confirm in ConfigProvider
    last_update_date: str
class VersionCreate(BaseModel):
    # Payload for creating a new project version.
    caption: str
    source_version_id: int | None = None  # None → start from an empty template
class IntentModel(BaseModel):
    """Definition of a single detectable intent within a version."""
    name: str
    caption: Optional[str] = ""
    locale: str = "tr-TR"
    detection_prompt: str
    # Use default_factory instead of mutable literal defaults, consistent
    # with the Field(default_factory=...) usage elsewhere in this file.
    examples: List[str] = Field(default_factory=list)
    parameters: List[Dict[str, Any]] = Field(default_factory=list)
    action: str
    fallback_timeout_prompt: Optional[str] = None
    fallback_error_prompt: Optional[str] = None
class VersionUpdate(BaseModel):
    # Payload for saving edits to a version's content.
    caption: str
    general_prompt: str
    llm: Dict[str, Any]
    intents: List[IntentModel]
    # Client-supplied timestamp, presumably for optimistic-lock checks — TODO confirm in ConfigProvider
    last_update_date: str
class APICreate(BaseModel):
    """Payload for defining a new external API integration."""
    name: str
    url: str
    method: str = "POST"
    # default_factory instead of mutable literal defaults, matching the
    # style already used by `retry` below.
    headers: Dict[str, str] = Field(default_factory=dict)
    body_template: Dict[str, Any] = Field(default_factory=dict)
    timeout_seconds: int = 10
    retry: Dict[str, Any] = Field(default_factory=lambda: {"retry_count": 3, "backoff_seconds": 2, "strategy": "static"})
    proxy: Optional[str] = None
    # Optional token-auth config; test_api() reads keys like "enabled",
    # "token_endpoint", "token_request_body" and "response_token_path".
    auth: Optional[Dict[str, Any]] = None
    response_prompt: Optional[str] = None
    response_mappings: List[Dict[str, Any]] = Field(default_factory=list)
class APIUpdate(BaseModel):
    """Payload for updating an existing API; all fields are required
    except response_mappings (the API's name is immutable and absent)."""
    url: str
    method: str
    headers: Dict[str, str]
    body_template: Dict[str, Any]
    timeout_seconds: int
    retry: Dict[str, Any]
    proxy: Optional[str]
    auth: Optional[Dict[str, Any]]
    response_prompt: Optional[str]
    # default_factory instead of a mutable literal default.
    response_mappings: List[Dict[str, Any]] = Field(default_factory=list)
    # Client-supplied timestamp, presumably for optimistic-lock checks — TODO confirm
    last_update_date: str
class TestRequest(BaseModel):
    # Selects which test suite run_all_tests() should execute.
    test_type: str  # "all", "ui", "backend", "integration", "spark"
class TTSRequest(BaseModel):
    # Payload for the TTS generation endpoint.
    text: str
    voice_id: Optional[str] = None
    model_id: Optional[str] = None
    output_format: Optional[str] = "mp3_44100_128"
    class Config:
        protected_namespaces = ()  # silence Pydantic's "model_" namespace warning for model_id
# ===================== Helpers ===================== | |
def verify_token(credentials: HTTPAuthorizationCredentials = Depends(security)) -> str:
    """Decode and validate the Bearer JWT; return the username from its `sub` claim.

    Raises:
        HTTPException 401: token expired, malformed, or missing a `sub` claim.
    """
    jwt_config = get_jwt_config()
    try:
        payload = jwt.decode(
            credentials.credentials,
            jwt_config["secret"],
            algorithms=[jwt_config["algorithm"]]
        )
        username = payload.get("sub")
        if username is None:
            raise HTTPException(status_code=401, detail="Invalid token")
        return username
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:  # catches all remaining PyJWT validation errors
        raise HTTPException(status_code=401, detail="Invalid token")
# Utility function to get username for Depends
get_username = verify_token
def hash_password(password: str, salt: Optional[str] = None) -> tuple[str, str]:
    """Hash *password* with bcrypt.

    Args:
        password: Plain-text password.
        salt: Optional pre-generated bcrypt salt string; a fresh salt is
            created when omitted.

    Returns:
        (hashed_password, salt) as UTF-8 strings.
    """
    if salt is None:
        salt = bcrypt.gensalt().decode('utf-8')
    # Ensure salt is bytes before handing it to bcrypt
    salt_bytes = salt.encode('utf-8') if isinstance(salt, str) else salt
    # Hash the password
    hashed = bcrypt.hashpw(password.encode('utf-8'), salt_bytes)
    return hashed.decode('utf-8'), salt
def verify_password(password: str, hashed: str, salt: str = None) -> bool:
    """Check *password* against a stored hash.

    Supports current bcrypt hashes (which embed their own salt) and legacy
    unsalted SHA256 hex digests. *salt* is accepted for call-site
    compatibility but unused here.

    Returns True on match; False on mismatch or any verification error.
    """
    try:
        # bcrypt hashes are self-describing and carry their own salt
        if hashed.startswith(('$2b$', '$2a$')):
            return bcrypt.checkpw(password.encode('utf-8'), hashed.encode('utf-8'))
        # Legacy SHA256 hex digest: constant-time compare to avoid timing leaks
        digest = hashlib.sha256(password.encode()).hexdigest()
        return hmac.compare_digest(digest, hashed)
    except Exception as e:
        # Never propagate: a malformed hash must simply read as "no match"
        log(f"Password verification error: {e}")
        return False
def get_timestamp():
    """Return the current UTC time as an ISO-8601 string with millisecond precision."""
    now = datetime.now(timezone.utc)
    millis = now.microsecond // 1000
    return f"{now.strftime('%Y-%m-%dT%H:%M:%S')}.{millis:03d}Z"
async def _spark_project_control(action: str, project_name: str, username: str):
    """POST a project control command to Spark's /project/{action} endpoint.

    Args:
        action: Spark action name used as the URL suffix (enable/disable/delete...).
        project_name: Target project; 400 is raised when empty.
        username: Authenticated admin user (kept for signature symmetry).

    Raises:
        HTTPException 400: Spark endpoint/token not configured or missing name.
        HTTPException (Spark's code): Spark responded with an HTTP error.
        HTTPException 500: transport or other unexpected failure.
    """
    if not project_name:
        raise HTTPException(status_code=400, detail="project_name is required")
    cfg = ConfigProvider.get()
    spark_endpoint = str(cfg.global_config.spark_endpoint).rstrip("/")
    spark_token = _get_spark_token()
    if not spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint not configured")
    if not spark_token:
        raise HTTPException(status_code=400, detail="Spark token not configured")
    headers = {
        "Authorization": f"Bearer {spark_token}",
        "Content-Type": "application/json"
    }
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            # Every action is sent as a POST request
            response = await client.post(
                f"{spark_endpoint}/project/{action}",
                json={"project_name": project_name},
                headers=headers
            )
            response.raise_for_status()
            return response.json()
    except httpx.HTTPStatusError as e:
        # Fix: Spark may return a non-JSON error body; .json() would raise
        # and turn a clean passthrough into an unhandled error.
        try:
            error_detail = e.response.json() if e.response.text else {"error": str(e)}
        except ValueError:
            error_detail = {"error": e.response.text}
        raise HTTPException(status_code=e.response.status_code, detail=error_detail)
    except Exception as e:
        log(f"❌ Spark {action} failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
def _get_spark_token() -> Optional[str]:
    """Return the Spark API token appropriate for the configured work mode."""
    work_mode = ConfigProvider.get().global_config.work_mode
    if work_mode not in ("hfcloud", "cloud"):
        # On-premise: the token lives in a local .env file
        from dotenv import load_dotenv
        load_dotenv()
        return os.getenv("SPARK_TOKEN")
    # Cloud: the token must be provided via HuggingFace Secrets
    token = os.getenv("SPARK_TOKEN")
    if not token:
        log("❌ SPARK_TOKEN not found in HuggingFace Secrets!")
    return token
async def notify_spark_manual(project: dict, version: dict, global_config: dict):
    """Notify Spark that a project version should be (re)loaded, returning Spark's response.

    Args:
        project: Project dict; must contain "name".
        version: Version dict; must contain "id" and an "llm" dict with
            "repo_id", "generation_config", "use_fine_tune", "fine_tune_zip".
        global_config: Global config dict with "spark_endpoint", "work_mode",
            and optionally an "enc:"-prefixed "cloud_token".

    Raises:
        ValueError: Spark endpoint or token is not configured.
        httpx.HTTPStatusError: Spark rejected the startup request.
    """
    # Note: module-level httpx import is used; the previous redundant local
    # "import httpx" was removed.
    spark_endpoint = global_config.get("spark_endpoint", "").rstrip("/")
    spark_token = _get_spark_token()
    if not spark_endpoint:
        raise ValueError("Spark endpoint not configured")
    if not spark_token:
        raise ValueError("Spark token not configured")
    work_mode = global_config.get("work_mode", "hfcloud")
    cloud_token = global_config.get("cloud_token", "")
    # Decrypt token if needed ("enc:" marks encrypted-at-rest values)
    if cloud_token and cloud_token.startswith("enc:"):
        cloud_token = decrypt(cloud_token)
    payload = {
        "work_mode": work_mode,
        "cloud_token": cloud_token,
        "project_name": project["name"],
        "project_version": version["id"],
        "repo_id": version["llm"]["repo_id"],
        "generation_config": version["llm"]["generation_config"],
        "use_fine_tune": version["llm"]["use_fine_tune"],
        "fine_tune_zip": version["llm"]["fine_tune_zip"] if version["llm"]["use_fine_tune"] else None
    }
    headers = {
        "Authorization": f"Bearer {spark_token}",
        "Content-Type": "application/json"
    }
    log(f"🚀 Manually notifying Spark about {project['name']} v{version['id']}")
    async with httpx.AsyncClient(timeout=30) as client:
        response = await client.post(spark_endpoint + "/startup", json=payload, headers=headers)
        response.raise_for_status()
        result = response.json()
    log(f"✅ Spark manual notification successful: {result.get('message', 'OK')}")
    return result
# ===================== Auth Endpoints ===================== | |
async def login(request: LoginRequest):
    """Authenticate user and return JWT token"""
    users = ConfigProvider.get().global_config.users
    account = next((u for u in users if u.username == request.username), None)
    # Same 401 for unknown user and wrong password: don't leak which one failed
    if not account or not verify_password(request.password, account.password_hash, account.salt):
        raise HTTPException(status_code=401, detail="Invalid credentials")
    jwt_config = get_jwt_config()
    expires_at = datetime.now(timezone.utc) + timedelta(hours=jwt_config["expiration_hours"])
    token = jwt.encode(
        {"sub": request.username, "exp": expires_at},
        jwt_config["secret"],
        algorithm=jwt_config["algorithm"],
    )
    log(f"✅ User '{request.username}' logged in")
    return LoginResponse(token=token, username=request.username)
async def change_password(
    request: ChangePasswordRequest,
    username: str = Depends(verify_token)
):
    """Change user password"""
    users = ConfigProvider.get().global_config.users
    # Locate the authenticated user's record
    account = next((u for u in users if u.username == username), None)
    if not account:
        raise HTTPException(status_code=404, detail="User not found")
    # Caller must prove knowledge of the current password first
    if not verify_password(request.current_password, account.password_hash, account.salt):
        raise HTTPException(status_code=401, detail="Current password is incorrect")
    new_hash, new_salt = hash_password(request.new_password)
    ConfigProvider.update_user_password(username, new_hash, new_salt)
    log(f"✅ Password changed for user '{username}'")
    return {"success": True}
# ===================== Locales Endpoints ===================== | |
async def get_available_locales(username: str = Depends(verify_token)):
    """Get all system-supported locales"""
    # Imported lazily, matching the other locale endpoints in this file
    from locale_manager import LocaleManager
    return {
        "locales": LocaleManager.get_available_locales_with_names(),
        "default": LocaleManager.get_default_locale(),
    }
async def get_locale_details(
    locale_code: str,
    username: str = Depends(verify_token)
):
    """Get detailed information for a specific locale"""
    from locale_manager import LocaleManager
    details = LocaleManager.get_locale_details(locale_code)
    # Unknown (or empty) locale info maps to a 404
    if not details:
        raise HTTPException(status_code=404, detail=f"Locale '{locale_code}' not found")
    return details
# ===================== Environment Endpoints ===================== | |
async def get_environment(username: str = Depends(verify_token)):
    """Return the global environment configuration for the admin UI.

    Values are returned as stored; TTS/STT settings come from their
    dedicated accessor methods on the config object.
    """
    cfg = ConfigProvider.get()
    env_config = cfg.global_config
    return {
        "work_mode": env_config.work_mode,
        "cloud_token": env_config.cloud_token or "",
        "spark_endpoint": str(env_config.spark_endpoint),
        "internal_prompt": env_config.internal_prompt or "",
        "tts_engine": env_config.tts_engine,
        "tts_engine_api_key": env_config.tts_engine_api_key or "",
        "tts_settings": env_config.get_tts_settings(),
        "stt_engine": env_config.stt_engine,
        "stt_engine_api_key": env_config.stt_engine_api_key or "",
        "stt_settings": env_config.get_stt_settings(),
        "parameter_collection_config": env_config.parameter_collection_config.model_dump()
    }
async def update_environment(
    update: EnvironmentUpdate,
    username: str = Depends(verify_token)
):
    """Validate and persist the global environment configuration.

    Validation is mode-dependent: GPT modes require an OpenAI-style key,
    cloud modes require a cloud token, and non-GPT modes require a Spark
    endpoint. TTS/STT engines must come from the known sets and, when
    enabled, carry an API key.
    """
    log(f"📝 Updating environment config by {username}")
    # Token validation based on mode
    if update.work_mode in ("gpt4o", "gpt4o-mini"):
        if not update.cloud_token:
            raise HTTPException(status_code=400, detail="OpenAI API key is required for GPT modes")
        # "enc:" prefix marks an already-encrypted stored key
        if not update.cloud_token.startswith("sk-") and not update.cloud_token.startswith("enc:"):
            raise HTTPException(status_code=400, detail="Invalid OpenAI API key format")
    elif update.work_mode in ("hfcloud", "cloud"):
        if not update.cloud_token:
            raise HTTPException(status_code=400, detail="Cloud token is required for cloud modes")
    # TTS/STT validation
    if update.tts_engine not in ("no_tts", "elevenlabs", "blaze"):
        raise HTTPException(status_code=400, detail="Invalid TTS engine")
    if update.stt_engine not in ("no_stt", "google", "azure", "amazon", "gpt4o_realtime", "flicker"):
        raise HTTPException(status_code=400, detail="Invalid STT engine")
    if update.tts_engine != "no_tts" and not update.tts_engine_api_key:
        raise HTTPException(status_code=400, detail=f"{update.tts_engine} API key is required")
    if update.stt_engine != "no_stt" and not update.stt_engine_api_key:
        raise HTTPException(status_code=400, detail=f"{update.stt_engine} API key or credentials required")
    # Spark endpoint validation
    if update.work_mode not in ("gpt4o", "gpt4o-mini") and not update.spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint is required for non-GPT modes")
    # Update via ConfigProvider
    ConfigProvider.update_environment(update.model_dump(), username)
    log(f"✅ Environment updated to {update.work_mode} with TTS: {update.tts_engine}, STT: {update.stt_engine} by {username}")
    return {"success": True}
# ===================== Project Endpoints ===================== | |
def list_enabled_projects():
    """Get list of enabled project names for chat"""
    projects = ConfigProvider.get().projects
    return [
        project.name
        for project in projects
        if project.enabled and not getattr(project, 'deleted', False)
    ]
async def list_projects(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all projects"""
    projects = ConfigProvider.get().projects
    # Soft-deleted projects are hidden unless explicitly requested
    if not include_deleted:
        projects = [item for item in projects if not getattr(item, 'deleted', False)]
    return [item.model_dump() for item in projects]
async def get_project(
    project_id: int,
    username: str = Depends(verify_token)
):
    """Get single project by ID"""
    project = ConfigProvider.get_project(project_id)
    # A soft-deleted project is treated the same as a missing one
    if not project or getattr(project, 'deleted', False):
        raise HTTPException(status_code=404, detail="Project not found")
    return project.model_dump()
async def create_project(
    project: ProjectCreate,
    username: str = Depends(verify_token)
):
    """Create new project with initial version"""
    # Validate supported languages
    from locale_manager import LocaleManager
    invalid_languages = LocaleManager.validate_project_languages(project.supported_languages)
    if invalid_languages:
        available_locales = LocaleManager.get_available_locales_with_names()
        available_codes = [locale['code'] for locale in available_locales]
        raise HTTPException(
            status_code=400,
            detail=f"Unsupported languages: {', '.join(invalid_languages)}. Available languages: {', '.join(available_codes)}"
        )
    # Ensure at least one supported language is selected.
    # NOTE(review): membership of default_language in supported_languages is
    # NOT verified here, despite what the original comment implied — confirm
    # whether ConfigProvider enforces it.
    if not project.supported_languages:
        raise HTTPException(
            status_code=400,
            detail="At least one supported language must be selected"
        )
    # Create project via ConfigProvider
    new_project = ConfigProvider.create_project(project.model_dump(), username)
    log(f"✅ Project '{project.name}' created by {username}")
    return new_project.model_dump()
async def update_project(
    project_id: int,
    update: ProjectUpdate,
    username: str = Depends(verify_token)
):
    """Update project metadata; persistence is delegated to ConfigProvider."""
    # Update via ConfigProvider
    updated_project = ConfigProvider.update_project(project_id, update.model_dump(), username)
    log(f"✅ Project '{updated_project.name}' updated by {username}")
    return updated_project.model_dump()
async def delete_project(project_id: int, username: str = Depends(verify_token)):
    """Delete project (soft delete — the record is flagged, not removed)."""
    ConfigProvider.delete_project(project_id, username)
    log(f"✅ Project deleted by {username}")
    return {"success": True}
async def toggle_project(project_id: int, username: str = Depends(verify_token)):
    """Toggle project enabled status; returns the new state."""
    enabled = ConfigProvider.toggle_project(project_id, username)
    log(f"✅ Project {'enabled' if enabled else 'disabled'} by {username}")
    return {"enabled": enabled}
# ===================== Version Endpoints ===================== | |
async def list_versions(
    project_id: int,
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List project versions"""
    project = ConfigProvider.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Soft-deleted versions are hidden unless explicitly requested
    if include_deleted:
        versions = project.versions
    else:
        versions = [item for item in project.versions if not getattr(item, 'deleted', False)]
    return [item.model_dump() for item in versions]
async def create_version(
    project_id: int,
    version_data: VersionCreate,
    username: str = Depends(verify_token)
):
    """Create new version (copied from source_version_id, or empty when None)."""
    new_version = ConfigProvider.create_version(project_id, version_data.model_dump(), username)
    log(f"✅ Version created for project {project_id} by {username}")
    return new_version.model_dump()
async def update_version(
    project_id: int,
    version_id: int,
    update: VersionUpdate,
    username: str = Depends(verify_token)
):
    """Update version content; persistence is delegated to ConfigProvider."""
    updated_version = ConfigProvider.update_version(project_id, version_id, update.model_dump(), username)
    log(f"✅ Version {version_id} updated for project {project_id} by {username}")
    return updated_version.model_dump()
async def publish_version(
    project_id: int,
    version_id: int,
    username: str = Depends(verify_token)
):
    """Publish a version and, for enabled projects, notify Spark.

    Spark notification is best-effort: a failure is logged but never
    fails the publish itself.
    """
    project, version = ConfigProvider.publish_version(project_id, version_id, username)
    log(f"✅ Version {version_id} published for project '{project.name}' by {username}")
    # Notify Spark if project is enabled
    if project.enabled:
        try:
            cfg = ConfigProvider.get()
            await notify_spark_manual(
                project.model_dump(),
                version.model_dump(),
                cfg.global_config.model_dump()
            )
        except Exception as e:
            log(f"⚠️ Failed to notify Spark: {e}")
            # Don't fail the publish
    return {"success": True}
async def delete_version(
    project_id: int,
    version_id: int,
    username: str = Depends(verify_token)
):
    """Delete version (soft delete — the record is flagged, not removed)."""
    ConfigProvider.delete_version(project_id, version_id, username)
    log(f"✅ Version {version_id} deleted for project {project_id} by {username}")
    return {"success": True}
async def validate_regex(
    request: dict = Body(...),
    username: str = Depends(verify_token)
):
    """Validate regex pattern"""
    pattern = request.get("pattern", "")
    test_value = request.get("test_value", "")
    echo = {"pattern": pattern, "test_value": test_value}
    try:
        import re
        # match() anchors at the start of the string only
        compiled = re.compile(pattern)
        return {"valid": True, "matches": bool(compiled.match(test_value)), **echo}
    except Exception as e:
        return {"valid": False, "matches": False, "error": str(e), **echo}
# ===================== API Endpoints ===================== | |
async def list_apis(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all APIs"""
    apis = ConfigProvider.get().apis
    if include_deleted:
        return [item.model_dump() for item in apis]
    # Soft-deleted APIs are filtered out by default
    return [item.model_dump() for item in apis if not getattr(item, 'deleted', False)]
async def create_api(api: APICreate, username: str = Depends(verify_token)):
    """Create new API definition; persistence is delegated to ConfigProvider."""
    new_api = ConfigProvider.create_api(api.model_dump(), username)
    log(f"✅ API '{api.name}' created by {username}")
    return new_api.model_dump()
async def update_api(
    api_name: str,
    update: APIUpdate,
    username: str = Depends(verify_token)
):
    """Update an API definition addressed by name (names are immutable)."""
    updated_api = ConfigProvider.update_api(api_name, update.model_dump(), username)
    log(f"✅ API '{api_name}' updated by {username}")
    return updated_api.model_dump()
async def delete_api(api_name: str, username: str = Depends(verify_token)):
    """Delete API (soft delete — the record is flagged, not removed)."""
    ConfigProvider.delete_api(api_name, username)
    log(f"✅ API '{api_name}' deleted by {username}")
    return {"success": True}
# ===================== Spark Integration Endpoints ===================== | |
async def spark_startup(request: dict = Body(...), username: str = Depends(verify_token)):
    """Trigger a Spark startup for a project's currently published version.

    Raises 400 when no project name or published version exists, 404 for
    unknown projects, and 500 when the Spark call itself fails.
    """
    project_name = request.get("project_name")
    if not project_name:
        raise HTTPException(status_code=400, detail="project_name is required")
    project = ConfigProvider.get_project_by_name(project_name)
    if not project:
        raise HTTPException(status_code=404, detail=f"Project not found: {project_name}")
    # Find published version
    version = next((v for v in project.versions if v.published), None)
    if not version:
        raise HTTPException(status_code=400, detail=f"No published version found for project: {project_name}")
    # Notify Spark
    try:
        cfg = ConfigProvider.get()
        result = await notify_spark_manual(
            project.model_dump(),
            version.model_dump(),
            cfg.global_config.model_dump()
        )
        return {"message": result.get("message", "Spark startup initiated")}
    except Exception as e:
        log(f"❌ Spark startup failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
async def spark_get_projects(username: str = Depends(verify_token)):
    """Fetch the project list from the Spark service.

    NOTE(review): unlike _spark_project_control, HTTP errors here are
    collapsed into a 500 instead of passing Spark's status code through —
    confirm whether that difference is intentional.
    """
    cfg = ConfigProvider.get()
    spark_endpoint = str(cfg.global_config.spark_endpoint).rstrip("/")
    spark_token = _get_spark_token()
    if not spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint not configured")
    if not spark_token:
        raise HTTPException(status_code=400, detail="Spark token not configured")
    headers = {
        "Authorization": f"Bearer {spark_token}"
    }
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            response = await client.get(spark_endpoint + "/project/list", headers=headers)
            response.raise_for_status()
            return response.json()
    except Exception as e:
        log(f"❌ Failed to get Spark projects: {e}")
        raise HTTPException(status_code=500, detail=str(e))
async def spark_enable_project(request: dict = Body(...), username: str = Depends(verify_token)):
    """Enable project in Spark (thin wrapper over _spark_project_control)."""
    return await _spark_project_control("enable", request.get("project_name"), username)
async def spark_disable_project(request: dict = Body(...), username: str = Depends(verify_token)):
    """Disable project in Spark (thin wrapper over _spark_project_control)."""
    return await _spark_project_control("disable", request.get("project_name"), username)
async def spark_delete_project(project_name: str, username: str = Depends(verify_token)):
    """Delete project from Spark (thin wrapper over _spark_project_control)."""
    return await _spark_project_control("delete", project_name, username)
# ===================== Test Endpoints ===================== | |
async def test_api(api_data: dict = Body(...), username: str = Depends(verify_token)):
    """Execute a one-off test call for an API definition, with optional token auth.

    Accepts an APICreate-shaped payload, optionally carrying a
    ``test_request`` override for the request body. Performs the token-auth
    pre-flight when ``auth.enabled`` is set, fires the request, and reports
    status, timing, headers, body, and any configured response-mapping
    extractions. Always returns a result dict (never raises) so the UI can
    render failures.
    """
    import requests
    import time
    try:
        # Extract test request data if provided
        test_request = api_data.pop("test_request", None)
        # Parse the APICreate model
        api = APICreate(**api_data)
        # Copy so the model's headers dict is never mutated
        headers = api.headers.copy()
        # Handle authentication if enabled
        if api.auth and api.auth.get("enabled"):
            auth_config = api.auth
            try:
                log(f"🔑 Fetching auth token for test...")
                # Make auth request
                auth_response = requests.post(
                    auth_config["token_endpoint"],
                    json=auth_config.get("token_request_body", {}),
                    timeout=10
                )
                auth_response.raise_for_status()
                # Walk the configured dotted path to locate the token;
                # fail fast with a clear message on the first missing segment
                auth_json = auth_response.json()
                token_path = auth_config.get("response_token_path", "token").split(".")
                auth_token = auth_json
                for path_part in token_path:
                    auth_token = auth_token.get(path_part)
                    if auth_token is None:
                        raise ValueError(f"Token not found at path: {auth_config.get('response_token_path')}")
                # Add token to headers
                headers["Authorization"] = f"Bearer {auth_token}"
                log(f"✅ Auth token obtained: {auth_token[:20]}...")
            except Exception as e:
                log(f"❌ Auth failed during test: {e}")
                return {
                    "success": False,
                    "error": f"Authentication failed: {str(e)}"
                }
        # Use test_request if provided, otherwise use body_template
        request_body = test_request if test_request is not None else api.body_template
        # Proxy mapping is shared by all request shapes below (hoisted)
        proxies = {"http": api.proxy, "https": api.proxy} if api.proxy else None
        # Make the actual API request
        start_time = time.time()
        # Body placement depends on the HTTP method
        if api.method in ["POST", "PUT", "PATCH"]:
            response = requests.request(
                method=api.method,
                url=api.url,
                headers=headers,
                json=request_body,
                timeout=api.timeout_seconds,
                proxies=proxies
            )
        elif api.method == "GET":
            response = requests.request(
                method=api.method,
                url=api.url,
                headers=headers,
                params=request_body if isinstance(request_body, dict) else None,
                timeout=api.timeout_seconds,
                proxies=proxies
            )
        else:  # DELETE, HEAD, etc. carry no body
            response = requests.request(
                method=api.method,
                url=api.url,
                headers=headers,
                timeout=api.timeout_seconds,
                proxies=proxies
            )
        response_time = int((time.time() - start_time) * 1000)
        # Prefer JSON, fall back to raw text (fix: no bare `except:`)
        try:
            response_body = response.json()
        except ValueError:
            response_body = response.text
        # Check if request was successful (2xx status codes)
        is_success = 200 <= response.status_code < 300
        # Extract values if response mappings are defined
        extracted_values = []
        if api.response_mappings and isinstance(response_body, dict):
            from jsonpath_ng import parse
            for mapping in api.response_mappings:
                try:
                    jsonpath_expr = parse(mapping['json_path'])
                    matches = jsonpath_expr.find(response_body)
                    value = matches[0].value if matches else None
                    extracted_values.append({
                        "variable_name": mapping['variable_name'],
                        "value": value,
                        "type": mapping['type'],
                        "caption": mapping.get('caption', '')
                    })
                except Exception as e:
                    # A single bad mapping must not sink the whole test
                    log(f"Failed to extract {mapping['variable_name']}: {e}")
                    extracted_values.append({
                        "variable_name": mapping['variable_name'],
                        "value": None,
                        "error": str(e),
                        "type": mapping['type'],
                        "caption": mapping.get('caption', '')
                    })
        result = {
            "success": is_success,
            "status_code": response.status_code,
            "response_time": response_time,
            "response_body": response_body,
            "response_headers": dict(response.headers),
            "request_body": request_body,
            "request_headers": headers
        }
        # Add extracted values if any
        if extracted_values:
            result["extracted_values"] = extracted_values
        # Add error info for non-2xx responses
        if not is_success:
            result["error"] = f"HTTP {response.status_code}: {response.reason}"
        log(f"📋 Test result: {response.status_code} in {response_time}ms")
        return result
    except requests.exceptions.Timeout:
        return {
            "success": False,
            "error": f"Request timed out after {api.timeout_seconds} seconds"
        }
    except requests.exceptions.ConnectionError as e:
        return {
            "success": False,
            "error": f"Connection error: {str(e)}"
        }
    except Exception as e:
        log(f"❌ Test API error: {e}")
        return {
            "success": False,
            "error": str(e)
        }
async def run_all_tests(
    request: TestRequest,
    username: str = Depends(verify_token)
):
    """Run the requested test suite.

    WARNING: this is a stub — the counts below are hard-coded placeholders,
    and request.test_type is currently ignored.
    """
    # TODO: Implement test runner
    return {
        "status": "completed",
        "total": 10,
        "passed": 8,
        "failed": 2,
        "details": []
    }
# ===================== Import/Export Endpoints ===================== | |
async def import_project(
    project_data: dict = Body(...),
    username: str = Depends(verify_token)
):
    """Import a project from a previously exported JSON payload."""
    imported_project = ConfigProvider.import_project(project_data, username)
    log(f"✅ Project '{imported_project.name}' imported by {username}")
    return imported_project.model_dump()
async def export_project(
    project_id: int,
    username: str = Depends(verify_token)
):
    """Export a project as a JSON payload suitable for import_project()."""
    export_data = ConfigProvider.export_project(project_id, username)
    log(f"✅ Project exported by {username}")
    return export_data
# ===================== TTS Endpoints ===================== | |
async def generate_tts(
    request: TTSRequest,
    username: str = Depends(verify_token)
):
    """Generate TTS audio from text and return it as a downloadable response.

    Raises 400 when TTS is disabled or unconfigured, 500 on provider
    creation or synthesis failures.
    """
    try:
        # Read the current config from ConfigProvider (use get, not reload)
        cfg = ConfigProvider.get()
        tts_engine = cfg.global_config.tts_engine
        log(f"🔧 TTS Engine: {tts_engine}")
        if tts_engine == "no_tts":
            raise HTTPException(status_code=400, detail="TTS is not configured")
        # Get decrypted API key
        api_key = cfg.global_config.get_tts_api_key()
        if not api_key:
            log("❌ TTS API key not found in config")
            raise HTTPException(status_code=400, detail="TTS API key not configured")
        # Import here to avoid circular dependency
        from tts_interface import create_tts_provider
        tts_provider = create_tts_provider(tts_engine, api_key)
        if not tts_provider:
            raise HTTPException(status_code=500, detail="Failed to create TTS provider")
        log(f"🎤 Generating TTS for {len(request.text)} characters using {tts_engine}")
        log(f"📝 Voice: {request.voice_id}, Model: {request.model_id}, Format: {request.output_format}")
        # Generate audio
        audio_data = await tts_provider.synthesize(
            text=request.text,
            voice_id=request.voice_id,
            model_id=request.model_id,
            output_format=request.output_format
        )
        # Return audio data with a content type inferred from the format name
        from fastapi.responses import Response
        content_type = "audio/mpeg" if request.output_format and request.output_format.startswith("mp3") else "audio/wav"
        return Response(
            content=audio_data,
            media_type=content_type,
            headers={
                "Content-Disposition": f"attachment; filename=tts_output.{request.output_format.split('_')[0] if request.output_format else 'mp3'}"
            }
        )
    except HTTPException:
        # Re-raise intentional HTTP errors untouched
        raise
    except Exception as e:
        log(f"❌ TTS generation error: {str(e)}")
        import traceback
        log(traceback.format_exc())
        raise HTTPException(status_code=500, detail=f"TTS generation failed: {str(e)}")
# ===================== Activity Log Endpoints ===================== | |
async def get_activity_log(
    limit: int = Query(100, ge=1, le=1000),
    username: str = Depends(verify_token)
):
    """Return the most recent *limit* activity-log entries (oldest first).

    Fix: the comprehension variable was named ``log``, shadowing the
    imported ``log`` helper inside the comprehension; renamed to ``entry``.
    """
    cfg = ConfigProvider.get()
    entries = cfg.activity_log
    # Return latest entries
    return [entry.model_dump() for entry in entries[-limit:]]
# ===================== Cleanup Task ===================== | |
def cleanup_old_logs():
    """Cleanup old activity logs (runs forever in a daemon thread).

    Errors are logged and swallowed so one failed pass never kills the
    background task.
    """
    while True:
        try:
            ConfigProvider.cleanup_activity_logs()
            log("🧹 Cleaned up old activity logs")
        except Exception as e:
            log(f"Error in cleanup task: {e}")
        # Run every hour
        time.sleep(3600)
def start_cleanup_task():
    """Start cleanup task in background"""
    # Daemon thread: never blocks interpreter shutdown
    worker = threading.Thread(target=cleanup_old_logs, daemon=True)
    worker.start()
    log("🧹 Started activity log cleanup task")