"""
Flare – Chat Handler
~~~~~~~~~~~~~~~~~~~~
Intent → Parametre → API → Humanize akışı.
"""
import json
from typing import Dict, Optional
from fastapi import APIRouter, HTTPException
from config_provider import (ConfigProvider, IntentConfig,
ParameterConfig, VersionConfig)
from llm_connector import ask_llm
from prompt_builder import (build_intent_prompt, build_param_prompt,
build_api_humanize_prompt)
from api_executor import call_api
from session import SessionStore
from utils import log
router = APIRouter()
cfg = ConfigProvider.get()
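# The active project/API configuration is resolved once, at module import time.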
# ==================== Helpers ===============================================
def _detect_intent(version: VersionConfig, user_input: str) -> Optional[IntentConfig]:
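    """Classify the user input against each configured intent via the LLM.

    Returns the first intent whose name appears in the classification
    response, or None when nothing matches.
    """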
for intent in version.intents:
prompt = build_intent_prompt(version.general_prompt, intent)
llm_resp = ask_llm(prompt, user_input, mode="classification")
        if intent.name.lower() in llm_resp.lower():
return intent
return None
def _extract_params(intent: IntentConfig, version: VersionConfig,
user_input: str,
current_vars: Dict[str, str]) -> Dict[str, str]:
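    """Ask the LLM (mode="json") only for the parameters not yet in the session.

    The extracted values are merged into current_vars in place and the
    updated mapping is returned.
    """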
missing = [p for p in intent.parameters
if p.name not in current_vars]
if not missing:
return current_vars
prompt_base = build_intent_prompt(version.general_prompt, intent)
prompt = build_param_prompt(prompt_base, missing)
llm_json = ask_llm(prompt, user_input, mode="json")
current_vars.update(llm_json)
return current_vars
# ==================== Endpoint ==============================================
@router.post("/chat")
def chat(session_id: str, user_input: str):
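    """Main chat endpoint: intent detection → parameter extraction → API call → humanized reply.

    Illustrative call shape, derived from this handler's signature and return
    values (the session id value below is made up):

        POST /chat?session_id=abc123&user_input=...
        -> {"reply": "...", "session": {...}}
    """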
session = SessionStore.get(session_id)
    project = cfg.projects[0]  # simplistic: always use the first configured project
    version = next((v for v in project.versions if v.published), None)
    if version is None:
        raise HTTPException(status_code=404, detail="No published version available")
# 1) Intent Detection
intent = _detect_intent(version, user_input)
if not intent:
reply = ask_llm(version.general_prompt, user_input)
session.add_turn("assistant", reply)
return {"reply": reply}
# 2) Param Extraction
vars_ = session.variables
vars_ = _extract_params(intent, version, user_input, vars_)
    # 3) Missing required-parameter check
    missing = [p for p in intent.parameters if p.required and p.name not in vars_]
    if missing:
        ask_prompts = "\n".join(p.invalid_prompt or f"{p.name} is required" for p in missing)
        reply = ask_llm(version.general_prompt, ask_prompts)
        session.add_turn("assistant", reply)
        return {"reply": reply}
    # 4) API call
    api_cfg = cfg.apis[intent.action]
    try:
        resp = call_api(api_cfg, vars_)
    except Exception as e:
        log(f"❌ API error: {e}")
        reply = intent.fallback_error_prompt or "An error occurred while processing the request."
        session.add_turn("assistant", reply)
        return {"reply": reply}
# 5) Humanize
human_prompt = build_api_humanize_prompt(
version.general_prompt,
api_cfg.response_prompt,
json.dumps(resp.json(), ensure_ascii=False, indent=2)
)
reply = ask_llm(human_prompt, "")
session.add_turn("assistant", reply)
return {"reply": reply, "session": session.to_dict()}