"""Flare – Chat / Intent Orchestration Endpoints
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
• POST /start_session → yeni session_id döner
• POST /chat → intent → param → API → humanize akışı
LLM entegrasyonu şimdilik stub.
"""
from __future__ import annotations
import json
from typing import Dict, Optional
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from config_provider import (
ConfigProvider,
IntentConfig,
ParameterConfig,
VersionConfig,
)
from prompt_builder import (
build_intent_prompt,
build_param_prompt,
build_api_humanize_prompt,
)
from api_executor import call_api
from session import session_store, Session
from utils import log
router = APIRouter()
cfg = ConfigProvider.get()  # config snapshot bound once at import time
# ---------------- Pydantic bodies ----------------
class SessionStartRequest(BaseModel):
project_name: str
class SessionStartResponse(BaseModel):
session_id: str
class ChatRequest(BaseModel):
session_id: str
user_input: str
class ChatResponse(BaseModel):
reply: str
session: Dict
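# Illustrative payload shapes for the two endpoints (all values are made up):
#   POST /start_session  {"project_name": "demo"}                      → {"session_id": "<uuid>"}
#   POST /chat           {"session_id": "<uuid>", "user_input": "..."} → {"reply": "...", "session": {...}}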
# --------------- TEMPORARY LLM stub ---------------
def ask_llm(prompt: str, user_input: str, mode: str = "text"):
    """Placeholder for the real LLM call.

    mode="classification" → name of the matched intent (stub: none)
    mode="json"           → extracted parameters as a dict (stub: empty)
    mode="text"           → free-form assistant reply
    """
    log(f"🤖 [STUB ask_llm] mode={mode} prompt_len={len(prompt)}")
    if mode == "classification":
        return ""  # behave as if no intent matched
    if mode == "json":
        return {}
    return "Bu bir test yanıtıdır (LLM stub)."  # "This is a test response (LLM stub)."
# ---------------- Helper funcs --------------------
def _detect_intent(version: VersionConfig, user_input: str) -> Optional[IntentConfig]:
    """Ask the LLM about each configured intent in turn; return the first whose
    name appears in the classification response, or None when nothing matches."""
    for intent in version.intents:
        prompt = build_intent_prompt(version.general_prompt, intent)
        llm_resp = ask_llm(prompt, user_input, mode="classification")
        if llm_resp and intent.name.lower() in llm_resp.lower():
return intent
return None
def _extract_params(
intent: IntentConfig,
version: VersionConfig,
user_input: str,
current_vars: Dict[str, str],
) -> Dict[str, str]:
    """Fill any still-missing intent parameters by asking the LLM for JSON;
    known values in current_vars are kept and the dict is updated in place."""
missing = [p for p in intent.parameters if p.name not in current_vars]
if not missing:
return current_vars
prompt_base = build_intent_prompt(version.general_prompt, intent)
prompt = build_param_prompt(prompt_base, missing)
llm_json = ask_llm(prompt, user_input, mode="json") or {}
current_vars.update(llm_json)
return current_vars
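# For illustration (parameter names are hypothetical): if "origin" and "date"
# are still missing, the LLM is expected to reply with a JSON object such as
#   {"origin": "Istanbul", "date": "2025-06-01"}
# which _extract_params merges into the session variables.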
# ---------------- Endpoints -----------------------
@router.post("/start_session", response_model=SessionStartResponse)
def start_session(body: SessionStartRequest):
session = session_store.create_session(body.project_name)
return SessionStartResponse(session_id=session.session_id)
@router.post("/chat", response_model=ChatResponse)
def chat(body: ChatRequest):
session: Session | None = session_store.get_session(body.session_id)
if session is None:
raise HTTPException(status_code=404, detail="Session not found")
    # ---- project & version selection ----
project = next((p for p in cfg.projects if p.name == session.project_name), None)
if project is None:
raise HTTPException(status_code=400, detail="Project not configured")
version = next((v for v in project.versions if v.published), None)
if version is None:
raise HTTPException(status_code=400, detail="No published version")
user_input = body.user_input
session.add_turn("user", user_input)
# 1) Intent detection
intent = _detect_intent(version, user_input)
if intent is None:
reply = ask_llm(version.general_prompt, user_input)
session.add_turn("assistant", reply)
return ChatResponse(reply=reply, session=session.to_dict())
    # 2) Param extraction (mutates session.variables in place)
    vars_ = _extract_params(intent, version, user_input, session.variables)
# 3) Missing params?
missing = [p for p in intent.parameters if p.required and p.name not in vars_]
if missing:
ask_prompts = "\n".join(
p.invalid_prompt or f"{p.name} bilgisine ihtiyacım var." for p in missing
)
reply = ask_llm(version.general_prompt, ask_prompts)
session.awaiting_parameters = [p.name for p in missing]
session.add_turn("assistant", reply)
return ChatResponse(reply=reply, session=session.to_dict())
    # 4) API call
    try:
        api_cfg = cfg.apis[intent.action]
    except KeyError:
        raise HTTPException(status_code=500, detail=f"No API config for action '{intent.action}'")
    try:
        resp = call_api(api_cfg, vars_)
    except Exception as e:
        log(f"❌ API error: {e}")
        # Fallback message: "An error occurred during the operation."
        reply = intent.fallback_error_prompt or "İşlem sırasında hata oluştu."
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())
# 5) Humanize
human_prompt = build_api_humanize_prompt(
version.general_prompt,
api_cfg.response_prompt,
json.dumps(resp.json(), ensure_ascii=False, indent=2),
)
reply = ask_llm(human_prompt, "")
session.add_turn("assistant", reply)
return ChatResponse(reply=reply, session=session.to_dict())
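# Example round trip, assuming the router is mounted at the app root on the
# usual Spaces port and a project named "demo" exists in the config (both
# illustrative):
#
#   curl -X POST http://localhost:7860/start_session \
#        -H "Content-Type: application/json" -d '{"project_name": "demo"}'
#   → {"session_id": "<uuid>"}
#
#   curl -X POST http://localhost:7860/chat \
#        -H "Content-Type: application/json" \
#        -d '{"session_id": "<uuid>", "user_input": "Merhaba"}'
#   → {"reply": "Bu bir test yanıtıdır (LLM stub).", "session": {...}}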