"""
Flare – Chat Handler (start_session & chat)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Intent → Parameter → API → Humanize flow
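
Illustrative request flow (hypothetical values; the route prefix, if any,
depends on how the router is mounted):

    POST /start_session  {"project_name": "demo-project"}
      -> {"session_id": "abc123"}
    POST /chat  {"session_id": "abc123", "user_input": "..."}
      -> {"reply": "...", "session": {...}}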
"""
from __future__ import annotations
import json
from typing import Dict
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from config_provider import ConfigProvider, VersionConfig
from prompt_builder import build_intent_prompt, build_param_prompt, build_api_humanize_prompt
from api_executor import call_api
from session import session_store, Session
from utils import log

cfg = ConfigProvider.get()
router = APIRouter()


# --------- Body models ----------
class SessionStartRequest(BaseModel):
    project_name: str


class SessionStartResponse(BaseModel):
    session_id: str


class ChatRequest(BaseModel):
    session_id: str
    user_input: str


class ChatResponse(BaseModel):
    reply: str
    session: Dict

# --------- LLM stub -------------
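# Expected return shapes, matching how the endpoints below consume the stub:
#   mode="classification" -> text expected to contain the matched intent's name ("" = no match)
#   mode="json"           -> dict of extracted parameter values
#   mode="text" (default) -> user-facing reply string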
def ask_llm(prompt: str, user_input: str = "", mode: str = "text"):
    log(f"🤖 [LLM-stub] mode={mode} prompt_len={len(prompt)}")
    if mode == "classification":
        return ""
    if mode == "json":
        return {}
    return "LLM yanıtı (stub)."  # Turkish: "LLM response (stub)."

# --------- Helpers --------------
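# Resolves the live config: the enabled project matching project_name and its
# highest-id published version; raises 400 if either is missing.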
def _get_live_version(project_name: str) -> VersionConfig:
    proj = next((p for p in cfg.projects if p.name == project_name and p.enabled), None)
    if proj is None:
        raise HTTPException(status_code=400, detail="Project not found or disabled")
    v = max((v for v in proj.versions if v.published), key=lambda x: x.id, default=None)
    if v is None:
        raise HTTPException(status_code=400, detail="No published version")
    return v

# --------- Endpoints ------------
@router.post("/start_session", response_model=SessionStartResponse)
def start_session(body: SessionStartRequest):
session = session_store.create_session(body.project_name)
return SessionStartResponse(session_id=session.session_id)
@router.post("/chat", response_model=ChatResponse)
def chat(body: ChatRequest):
session: Session | None = session_store.get_session(body.session_id)
if session is None:
raise HTTPException(status_code=404, detail="Session not found")
version = _get_live_version(session.project_name)
user_input = body.user_input
session.add_turn("user", user_input)
    # 1) Intent detection: run one classification prompt per configured intent
    intent = None
    for it in version.intents:
        prompt = build_intent_prompt(version.general_prompt, it)
        resp = ask_llm(prompt, user_input, mode="classification")
        if it.name.lower() in resp.lower():
            intent = it
            break

    # No intent matched: answer with a plain LLM reply and return
    if intent is None:
        reply = ask_llm(version.general_prompt, user_input)
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())

    # 2) Parameter extraction: try to fill missing parameters from the user input
    vars_ = session.variables.copy()
    missing = [p for p in intent.parameters if p.name not in vars_]
    if missing:
        prompt = build_param_prompt(version.general_prompt, intent, missing)
        llm_json = ask_llm(prompt, user_input, mode="json") or {}
        vars_.update(llm_json)

    # If required parameters are still missing, ask the user for them and return
    still_missing = [p.name for p in intent.parameters if p.required and p.name not in vars_]
    if still_missing:
        # Turkish: "could you tell me the value of {p}?"
        ask = " ".join(f"{p} değerini söyler misin?" for p in still_missing)
        reply = ask_llm(version.general_prompt, ask)
        session.variables = vars_
        session.awaiting_parameters = still_missing
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())

    # 3) API call for the intent's configured action
    api_cfg = cfg.get_api(intent.action)
    if api_cfg is None:
        raise HTTPException(status_code=500, detail="API not configured")
    try:
        resp = call_api(api_cfg, vars_)
    except Exception as e:
        log(f"❌ API error: {e}")
        # Turkish fallback: "An error occurred during the operation."
        reply = intent.fallback_error_prompt or "İşlem sırasında hata oluştu."
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())

    # 4) Humanize
    human_prompt = build_api_humanize_prompt(
        version.general_prompt,
        api_cfg.response_prompt or "",
        json.dumps(resp.json(), ensure_ascii=False, indent=2),
    )
    reply = ask_llm(human_prompt)

    session.variables = vars_
    session.add_turn("assistant", reply)
    return ChatResponse(reply=reply, session=session.to_dict())
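

# Mounting sketch (illustrative only; the real app wiring lives outside this
# module, and the module name below is an assumption):
#   from fastapi import FastAPI
#   from chat_handler import router
#
#   app = FastAPI()
#   app.include_router(router)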