ciyidogan committed on
Commit
a252004
·
verified ·
1 Parent(s): e3456a9

Update chat_handler.py

Browse files
Files changed (1) hide show
  1. chat_handler.py +57 -75
chat_handler.py CHANGED
@@ -1,8 +1,7 @@
1
- """Flare – Chat / Intent Orchestration Endpoints
2
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
3
- • POST /start_session → yeni session_id döner
4
- POST /chat intent → param → API → humanize akışı
5
- LLM entegrasyonu şimdilik stub.
6
  """
7
 
8
  from __future__ import annotations
@@ -13,25 +12,18 @@ from typing import Dict, Optional
13
  from fastapi import APIRouter, HTTPException
14
  from pydantic import BaseModel
15
 
16
- from config_provider import (
17
- ConfigProvider,
18
- IntentConfig,
19
- ParameterConfig,
20
- VersionConfig,
21
- )
22
- from prompt_builder import (
23
- build_intent_prompt,
24
- build_param_prompt,
25
- build_api_humanize_prompt,
26
- )
27
  from api_executor import call_api
28
  from session import session_store, Session
29
  from utils import log
30
 
31
- router = APIRouter()
32
  cfg = ConfigProvider.get()
33
 
34
- # ---------------- Pydantic bodies ----------------
 
 
 
35
  class SessionStartRequest(BaseModel):
36
  project_name: str
37
 
@@ -50,44 +42,28 @@ class ChatResponse(BaseModel):
50
  session: Dict
51
 
52
 
53
- # --------------- TEMPORARY LLM stub ---------------
54
- def ask_llm(prompt: str, user_input: str, mode: str = "text"):
55
- log(f"🤖 [STUB ask_llm] mode={mode} prompt_len={len(prompt)}")
56
  if mode == "classification":
57
- return "" # intent yokmuş gibi davran
58
  if mode == "json":
59
  return {}
60
- return "Bu bir test yanıtıdır (LLM stub)."
61
-
62
-
63
- # ---------------- Helper funcs --------------------
64
- def _detect_intent(version: VersionConfig, user_input: str) -> Optional[IntentConfig]:
65
- for intent in version.intents:
66
- prompt = build_intent_prompt(version.general_prompt, intent)
67
- llm_resp = ask_llm(prompt, user_input, mode="classification")
68
- if intent.name.lower() in llm_resp.lower():
69
- return intent
70
- return None
71
-
72
 
73
- def _extract_params(
74
- intent: IntentConfig,
75
- version: VersionConfig,
76
- user_input: str,
77
- current_vars: Dict[str, str],
78
- ) -> Dict[str, str]:
79
- missing = [p for p in intent.parameters if p.name not in current_vars]
80
- if not missing:
81
- return current_vars
82
 
83
- prompt_base = build_intent_prompt(version.general_prompt, intent)
84
- prompt = build_param_prompt(prompt_base, missing)
85
- llm_json = ask_llm(prompt, user_input, mode="json") or {}
86
- current_vars.update(llm_json)
87
- return current_vars
 
 
 
 
88
 
89
 
90
- # ---------------- Endpoints -----------------------
91
  @router.post("/start_session", response_model=SessionStartResponse)
92
  def start_session(body: SessionStartRequest):
93
  session = session_store.create_session(body.project_name)
@@ -100,41 +76,46 @@ def chat(body: ChatRequest):
100
  if session is None:
101
  raise HTTPException(status_code=404, detail="Session not found")
102
 
103
- # ---- project & version seçimi ----
104
- project = next((p for p in cfg.projects if p.name == session.project_name), None)
105
- if project is None:
106
- raise HTTPException(status_code=400, detail="Project not configured")
107
-
108
- version = next((v for v in project.versions if v.published), None)
109
- if version is None:
110
- raise HTTPException(status_code=400, detail="No published version")
111
-
112
  user_input = body.user_input
113
  session.add_turn("user", user_input)
114
 
115
- # 1) Intent detection
116
- intent = _detect_intent(version, user_input)
 
 
 
 
 
 
 
117
  if intent is None:
118
  reply = ask_llm(version.general_prompt, user_input)
119
  session.add_turn("assistant", reply)
120
  return ChatResponse(reply=reply, session=session.to_dict())
121
 
122
- # 2) Param extraction
123
- vars_ = _extract_params(intent, version, user_input, session.variables)
124
-
125
- # 3) Missing params?
126
- missing = [p for p in intent.parameters if p.required and p.name not in vars_]
127
  if missing:
128
- ask_prompts = "\n".join(
129
- p.invalid_prompt or f"{p.name} bilgisine ihtiyacım var." for p in missing
130
- )
131
- reply = ask_llm(version.general_prompt, ask_prompts)
132
- session.awaiting_parameters = [p.name for p in missing]
 
 
 
 
 
133
  session.add_turn("assistant", reply)
134
  return ChatResponse(reply=reply, session=session.to_dict())
135
 
136
- # 4) API call
137
- api_cfg = cfg.apis[intent.action]
 
 
 
138
  try:
139
  resp = call_api(api_cfg, vars_)
140
  except Exception as e:
@@ -143,12 +124,13 @@ def chat(body: ChatRequest):
143
  session.add_turn("assistant", reply)
144
  return ChatResponse(reply=reply, session=session.to_dict())
145
 
146
- # 5) Humanize
147
  human_prompt = build_api_humanize_prompt(
148
  version.general_prompt,
149
- api_cfg.response_prompt,
150
  json.dumps(resp.json(), ensure_ascii=False, indent=2),
151
  )
152
- reply = ask_llm(human_prompt, "")
 
153
  session.add_turn("assistant", reply)
154
  return ChatResponse(reply=reply, session=session.to_dict())
 
1
+ """
2
+ Flare – Chat Handler (start_session & chat)
3
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4
+ Intent → Parametre → API → Humanize akışı
 
5
  """
6
 
7
  from __future__ import annotations
 
12
  from fastapi import APIRouter, HTTPException
13
  from pydantic import BaseModel
14
 
15
+ from config_provider import ConfigProvider, VersionConfig
16
+ from prompt_builder import build_intent_prompt, build_param_prompt, build_api_humanize_prompt
 
 
 
 
 
 
 
 
 
17
  from api_executor import call_api
18
  from session import session_store, Session
19
  from utils import log
20
 
 
21
  cfg = ConfigProvider.get()
22
 
23
+ router = APIRouter()
24
+
25
+
26
+ # --------- Body models ----------
27
  class SessionStartRequest(BaseModel):
28
  project_name: str
29
 
 
42
  session: Dict
43
 
44
 
45
+ # --------- LLM stub -------------
46
+ def ask_llm(prompt: str, user_input: str = "", mode: str = "text"):
47
+ log(f"🤖 [LLM-stub] mode={mode} prompt_len={len(prompt)}")
48
  if mode == "classification":
49
+ return ""
50
  if mode == "json":
51
  return {}
52
+ return "LLM yanıtı (stub)."
 
 
 
 
 
 
 
 
 
 
 
53
 
 
 
 
 
 
 
 
 
 
54
 
55
+ # --------- Helpers --------------
56
+ def _get_live_version(project_name: str) -> VersionConfig:
57
+ proj = next((p for p in cfg.projects if p.name == project_name and p.enabled), None)
58
+ if proj is None:
59
+ raise HTTPException(status_code=400, detail="Project not found or disabled")
60
+ v = max((v for v in proj.versions if v.published), key=lambda x: x.id, default=None)
61
+ if v is None:
62
+ raise HTTPException(status_code=400, detail="No published version")
63
+ return v
64
 
65
 
66
+ # --------- Endpoints ------------
67
  @router.post("/start_session", response_model=SessionStartResponse)
68
  def start_session(body: SessionStartRequest):
69
  session = session_store.create_session(body.project_name)
 
76
  if session is None:
77
  raise HTTPException(status_code=404, detail="Session not found")
78
 
79
+ version = _get_live_version(session.project_name)
 
 
 
 
 
 
 
 
80
  user_input = body.user_input
81
  session.add_turn("user", user_input)
82
 
83
+ # 1) Intent detection loop
84
+ intent = None
85
+ for it in version.intents:
86
+ prompt = build_intent_prompt(version.general_prompt, it)
87
+ resp = ask_llm(prompt, user_input, mode="classification")
88
+ if it.name.lower() in resp.lower():
89
+ intent = it
90
+ break
91
+
92
  if intent is None:
93
  reply = ask_llm(version.general_prompt, user_input)
94
  session.add_turn("assistant", reply)
95
  return ChatResponse(reply=reply, session=session.to_dict())
96
 
97
+ # 2) Parameter extraction
98
+ vars_ = session.variables.copy()
99
+ missing = [p for p in intent.parameters if p.name not in vars_]
 
 
100
  if missing:
101
+ prompt = build_param_prompt(version.general_prompt, intent, missing)
102
+ llm_json = ask_llm(prompt, user_input, mode="json") or {}
103
+ vars_.update(llm_json)
104
+
105
+ still_missing = [p.name for p in intent.parameters if p.required and p.name not in vars_]
106
+ if still_missing:
107
+ ask = " ".join(f"{p} değerini söyler misin?" for p in still_missing)
108
+ reply = ask_llm(version.general_prompt, ask)
109
+ session.variables = vars_
110
+ session.awaiting_parameters = still_missing
111
  session.add_turn("assistant", reply)
112
  return ChatResponse(reply=reply, session=session.to_dict())
113
 
114
+ # 3) API call
115
+ api_cfg = cfg.get_api(intent.action)
116
+ if api_cfg is None:
117
+ raise HTTPException(status_code=500, detail="API not configured")
118
+
119
  try:
120
  resp = call_api(api_cfg, vars_)
121
  except Exception as e:
 
124
  session.add_turn("assistant", reply)
125
  return ChatResponse(reply=reply, session=session.to_dict())
126
 
127
+ # 4) Humanize
128
  human_prompt = build_api_humanize_prompt(
129
  version.general_prompt,
130
+ api_cfg.response_prompt or "",
131
  json.dumps(resp.json(), ensure_ascii=False, indent=2),
132
  )
133
+ reply = ask_llm(human_prompt)
134
+ session.variables = vars_
135
  session.add_turn("assistant", reply)
136
  return ChatResponse(reply=reply, session=session.to_dict())