Spaces:
Building
Building
""" | |
Flare – Chat Handler (start_session & chat) | |
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
Intent → Parametre → API → Humanize akışı | |
""" | |
from __future__ import annotations | |
import json | |
from typing import Dict, Optional | |
from fastapi import APIRouter, HTTPException | |
from pydantic import BaseModel | |
from config_provider import ConfigProvider, VersionConfig | |
from prompt_builder import build_intent_prompt, build_param_prompt, build_api_humanize_prompt | |
from api_executor import call_api | |
from session import session_store, Session | |
from utils import log | |
# Load the active configuration once at import time; shared by all handlers below.
cfg = ConfigProvider.get()
# Router collecting this module's endpoints; presumably mounted by the main app — TODO confirm.
router = APIRouter()
# --------- Body models ---------- | |
class SessionStartRequest(BaseModel):
    """Request body for creating a new chat session."""
    project_name: str  # name of the project the session is bound to
class SessionStartResponse(BaseModel):
    """Response body returned by ``start_session``."""
    session_id: str  # identifier clients must send with each chat turn
class ChatRequest(BaseModel):
    """Request body for one chat turn."""
    session_id: str  # id obtained from start_session
    user_input: str  # raw user message for this turn
class ChatResponse(BaseModel):
    """Response body for one chat turn."""
    reply: str     # assistant reply text
    session: Dict  # serialized session state (session.to_dict())
# --------- LLM stub ------------- | |
def ask_llm(prompt: str, user_input: str = "", mode: str = "text"): | |
log(f"🤖 [LLM-stub] mode={mode} prompt_len={len(prompt)}") | |
if mode == "classification": | |
return "" | |
if mode == "json": | |
return {} | |
return "LLM yanıtı (stub)." | |
# --------- Helpers -------------- | |
def _get_live_version(project_name: str) -> VersionConfig:
    """Return the newest (highest-id) published version of an enabled project.

    Raises:
        HTTPException: 400 when the project is missing/disabled or has no
            published version.
    """
    for candidate in cfg.projects:
        if candidate.name == project_name and candidate.enabled:
            project = candidate
            break
    else:
        raise HTTPException(status_code=400, detail="Project not found or disabled")
    published = [ver for ver in project.versions if ver.published]
    if not published:
        raise HTTPException(status_code=400, detail="No published version")
    return max(published, key=lambda ver: ver.id)
# --------- Endpoints ------------ | |
def start_session(body: SessionStartRequest):
    """Create a fresh session for the requested project and return its id."""
    new_session = session_store.create_session(body.project_name)
    return SessionStartResponse(session_id=new_session.session_id)
def chat(body: ChatRequest):
    """Process one user turn for an existing session.

    Pipeline: intent detection → parameter extraction → API call →
    humanized reply.  Each early-exit branch records the assistant reply
    in the session history before returning.

    Raises:
        HTTPException: 404 for an unknown session id; 400 via
            ``_get_live_version``; 500 when the matched intent's API is
            not configured.
    """
    session: Session | None = session_store.get_session(body.session_id)
    if session is None:
        raise HTTPException(status_code=404, detail="Session not found")
    version = _get_live_version(session.project_name)
    user_input = body.user_input
    session.add_turn("user", user_input)
    # 1) Intent detection loop: query the LLM once per configured intent and
    # accept the first whose name appears (case-insensitively) in the response.
    intent = None
    for it in version.intents:
        prompt = build_intent_prompt(version.general_prompt, it)
        resp = ask_llm(prompt, user_input, mode="classification")
        if it.name.lower() in resp.lower():
            intent = it
            break
    if intent is None:
        # No intent matched — fall back to free-form chat on the general prompt.
        reply = ask_llm(version.general_prompt, user_input)
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())
    # 2) Parameter extraction: work on a copy of the session variables and
    # commit only on a branch that is sure to persist them.
    vars_ = session.variables.copy()
    missing = [p for p in intent.parameters if p.name not in vars_]
    if missing:
        prompt = build_param_prompt(version.general_prompt, intent, missing)
        # Stub returns {}; `or {}` also guards against a None JSON reply.
        llm_json = ask_llm(prompt, user_input, mode="json") or {}
        vars_.update(llm_json)
    still_missing = [p.name for p in intent.parameters if p.required and p.name not in vars_]
    if still_missing:
        # Required parameters still absent: ask the user for them (Turkish
        # prompt text), save the partial values, and pause until next turn.
        ask = " ".join(f"{p} değerini söyler misin?" for p in still_missing)
        reply = ask_llm(version.general_prompt, ask)
        session.variables = vars_
        session.awaiting_parameters = still_missing
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())
    # 3) API call
    api_cfg = cfg.get_api(intent.action)
    if api_cfg is None:
        raise HTTPException(status_code=500, detail="API not configured")
    try:
        resp = call_api(api_cfg, vars_)
    except Exception as e:
        # Best-effort degradation: log and answer with the intent's fallback
        # message instead of surfacing a 500 to the user.
        log(f"❌ API error: {e}")
        reply = intent.fallback_error_prompt or "İşlem sırasında hata oluştu."
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())
    # 4) Humanize: have the LLM rephrase the raw API JSON for the user.
    # NOTE(review): assumes call_api returns a requests-style object with .json() — confirm.
    human_prompt = build_api_humanize_prompt(
        version.general_prompt,
        api_cfg.response_prompt or "",
        json.dumps(resp.json(), ensure_ascii=False, indent=2),
    )
    reply = ask_llm(human_prompt)
    session.variables = vars_  # commit the collected parameters
    session.add_turn("assistant", reply)
    return ChatResponse(reply=reply, session=session.to_dict())