ciyidogan commited on
Commit
447c15c
·
verified ·
1 Parent(s): 525e493

Update chat_handler.py

Browse files
Files changed (1) hide show
  1. chat_handler.py +101 -40
chat_handler.py CHANGED
@@ -1,93 +1,154 @@
 
 
 
 
 
1
  """
2
- Flare – Chat Handler
3
- ~~~~~~~~~~~~~~~~~~~~
4
- Intent → Parametre → API → Humanize akışı.
5
- """
6
 
7
  import json
8
- import re
9
- from datetime import datetime
10
- from typing import Dict, List, Optional
11
 
12
  from fastapi import APIRouter, HTTPException
 
13
 
14
- from config_provider import (ConfigProvider, IntentConfig,
15
- ParameterConfig, VersionConfig)
16
- from llm_connector import ask_llm
17
- from prompt_builder import (build_intent_prompt, build_param_prompt,
18
- build_api_humanize_prompt)
 
 
 
 
 
 
19
  from api_executor import call_api
20
- from session import SessionStore
21
  from utils import log
22
 
23
  router = APIRouter()
24
  cfg = ConfigProvider.get()
25
 
26
- # ==================== Helpers ===============================================
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  def _detect_intent(version: VersionConfig, user_input: str) -> Optional[IntentConfig]:
28
  for intent in version.intents:
29
  prompt = build_intent_prompt(version.general_prompt, intent)
30
  llm_resp = ask_llm(prompt, user_input, mode="classification")
31
- if intent.name in llm_resp.lower():
32
  return intent
33
  return None
34
 
35
 
36
- def _extract_params(intent: IntentConfig, version: VersionConfig,
37
- user_input: str,
38
- current_vars: Dict[str, str]) -> Dict[str, str]:
39
- missing = [p for p in intent.parameters
40
- if p.name not in current_vars]
 
 
41
  if not missing:
42
  return current_vars
43
 
44
  prompt_base = build_intent_prompt(version.general_prompt, intent)
45
  prompt = build_param_prompt(prompt_base, missing)
46
- llm_json = ask_llm(prompt, user_input, mode="json")
47
  current_vars.update(llm_json)
48
  return current_vars
49
 
50
 
51
- # ==================== Endpoint ==============================================
52
- @router.post("/chat")
53
- def chat(session_id: str, user_input: str):
54
- session = SessionStore.get(session_id)
55
- project = cfg.projects[0] # simplistic: first project
56
- version = next(v for v in project.versions if v.published)
 
 
 
 
 
 
 
 
 
 
 
57
 
58
- # 1) Intent Detection
 
 
 
 
 
 
 
59
  intent = _detect_intent(version, user_input)
60
- if not intent:
61
  reply = ask_llm(version.general_prompt, user_input)
62
  session.add_turn("assistant", reply)
63
- return {"reply": reply}
64
 
65
- # 2) Param Extraction
66
- vars_ = session.variables
67
- vars_ = _extract_params(intent, version, user_input, vars_)
68
 
69
- # 3) Eksik parametre kontrolü
70
  missing = [p for p in intent.parameters if p.required and p.name not in vars_]
71
  if missing:
72
- ask_prompts = "\n".join(p.invalid_prompt or f"{p.name} gerekti" for p in missing)
 
 
73
  reply = ask_llm(version.general_prompt, ask_prompts)
74
- return {"reply": reply}
 
 
75
 
76
- # 4) API
77
  api_cfg = cfg.apis[intent.action]
78
  try:
79
  resp = call_api(api_cfg, vars_)
80
  except Exception as e:
81
  log(f"❌ API error: {e}")
82
  reply = intent.fallback_error_prompt or "İşlem sırasında hata oluştu."
83
- return {"reply": reply}
 
84
 
85
  # 5) Humanize
86
  human_prompt = build_api_humanize_prompt(
87
  version.general_prompt,
88
  api_cfg.response_prompt,
89
- json.dumps(resp.json(), ensure_ascii=False, indent=2)
90
  )
91
  reply = ask_llm(human_prompt, "")
92
  session.add_turn("assistant", reply)
93
- return {"reply": reply, "session": session.to_dict()}
 
1
+ """Flare – Chat / Intent Orchestration Endpoints
2
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
3
+ • POST /start_session → yeni session_id döner
4
+ • POST /chat → intent → param → API → humanize akışı
5
+ LLM entegrasyonu şimdilik stub.
6
  """
7
+
8
+ from __future__ import annotations
 
 
9
 
10
  import json
11
+ from typing import Dict, Optional
 
 
12
 
13
  from fastapi import APIRouter, HTTPException
14
+ from pydantic import BaseModel
15
 
16
+ from config_provider import (
17
+ ConfigProvider,
18
+ IntentConfig,
19
+ ParameterConfig,
20
+ VersionConfig,
21
+ )
22
+ from prompt_builder import (
23
+ build_intent_prompt,
24
+ build_param_prompt,
25
+ build_api_humanize_prompt,
26
+ )
27
  from api_executor import call_api
28
+ from session import session_store, Session
29
  from utils import log
30
 
31
  router = APIRouter()
32
  cfg = ConfigProvider.get()
33
 
34
+ # ---------------- Pydantic bodies ----------------
35
class SessionStartRequest(BaseModel):
    """Request body for POST /start_session."""

    # Name of the configured project the new session will be bound to.
    project_name: str
37
+
38
+
39
class SessionStartResponse(BaseModel):
    """Response body for POST /start_session."""

    # Opaque identifier the client must send back on every /chat call.
    session_id: str
41
+
42
+
43
class ChatRequest(BaseModel):
    """Request body for POST /chat."""

    # Identifier returned by /start_session.
    session_id: str
    # Raw end-user utterance for this turn.
    user_input: str
46
+
47
+
48
class ChatResponse(BaseModel):
    """Response body for POST /chat."""

    # Assistant reply text for this turn.
    reply: str
    # Session snapshot as produced by Session.to_dict().
    session: Dict
51
+
52
+
53
+ # --------------- TEMPORARY LLM stub ---------------
54
def ask_llm(prompt: str, user_input: str, mode: str = "text"):
    """Temporary LLM stand-in until the real connector is wired up.

    Returns "" for ``mode="classification"`` (behaves as if no intent
    matched), {} for ``mode="json"``, and a canned text reply otherwise.
    """
    log(f"🤖 [STUB ask_llm] mode={mode} prompt_len={len(prompt)}")
    if mode == "json":
        return {}
    if mode == "classification":
        # Act as though the classifier found no intent.
        return ""
    return "Bu bir test yanıtıdır (LLM stub)."
61
+
62
+
63
+ # ---------------- Helper funcs --------------------
64
def _detect_intent(version: VersionConfig, user_input: str) -> Optional[IntentConfig]:
    """Return the first configured intent whose name appears (case-insensitively)
    in the classifier LLM's reply, or None when nothing matches.

    Intents are probed lazily in configuration order, so at most one LLM call
    is made per intent until the first hit.
    """
    hits = (
        candidate
        for candidate in version.intents
        if candidate.name.lower()
        in ask_llm(
            build_intent_prompt(version.general_prompt, candidate),
            user_input,
            mode="classification",
        ).lower()
    )
    return next(hits, None)
71
 
72
 
73
def _extract_params(
    intent: IntentConfig,
    version: VersionConfig,
    user_input: str,
    current_vars: Dict[str, str],
) -> Dict[str, str]:
    """Ask the LLM to fill any intent parameters not yet captured.

    Mutates ``current_vars`` in place with whatever the JSON-mode LLM call
    yields for the still-unset parameters, then returns it. Parameters that
    already have a value are never re-extracted.
    """
    unresolved = [param for param in intent.parameters if param.name not in current_vars]
    if unresolved:
        base_prompt = build_intent_prompt(version.general_prompt, intent)
        extracted = ask_llm(build_param_prompt(base_prompt, unresolved), user_input, mode="json") or {}
        current_vars.update(extracted)
    return current_vars
88
 
89
 
90
+ # ---------------- Endpoints -----------------------
91
@router.post("/start_session", response_model=SessionStartResponse)
def start_session(body: SessionStartRequest):
    """Create a new chat session bound to a configured project.

    Validates the project name up front (mirroring the check in /chat) so a
    typo fails here with a 404 instead of producing a session whose every
    /chat call would 400 with "Project not configured".

    Raises:
        HTTPException 404: ``project_name`` matches no configured project.
    """
    if not any(p.name == body.project_name for p in cfg.projects):
        raise HTTPException(status_code=404, detail="Project not found")
    session = session_store.create_session(body.project_name)
    return SessionStartResponse(session_id=session.session_id)
95
+
96
+
97
@router.post("/chat", response_model=ChatResponse)
def chat(body: ChatRequest):
    """Run one turn of the intent → param → API → humanize pipeline.

    Raises:
        HTTPException 404: unknown ``session_id``.
        HTTPException 400: session's project missing from config, or the
            project has no published version.
    """
    session: Session | None = session_store.get_session(body.session_id)
    if session is None:
        raise HTTPException(status_code=404, detail="Session not found")

    # ---- project & version selection ----
    project = next((p for p in cfg.projects if p.name == session.project_name), None)
    if project is None:
        raise HTTPException(status_code=400, detail="Project not configured")

    version = next((v for v in project.versions if v.published), None)
    if version is None:
        raise HTTPException(status_code=400, detail="No published version")

    user_input = body.user_input
    session.add_turn("user", user_input)

    # 1) Intent detection — fall back to free-form chat when nothing matches.
    intent = _detect_intent(version, user_input)
    if intent is None:
        reply = ask_llm(version.general_prompt, user_input)
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())

    # 2) Param extraction (mutates session.variables in place).
    vars_ = _extract_params(intent, version, user_input, session.variables)

    # 3) Required params still missing → ask the user and remember what
    #    we are waiting for.
    missing = [p for p in intent.parameters if p.required and p.name not in vars_]
    if missing:
        ask_prompts = "\n".join(
            p.invalid_prompt or f"{p.name} bilgisine ihtiyacım var." for p in missing
        )
        reply = ask_llm(version.general_prompt, ask_prompts)
        session.awaiting_parameters = [p.name for p in missing]
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())

    # 4) API call. A missing API config used to raise KeyError (bare 500);
    #    degrade to the intent's fallback message instead, same as a failed call.
    api_cfg = cfg.apis.get(intent.action)
    if api_cfg is None:
        log(f"❌ API config missing for action: {intent.action}")
        reply = intent.fallback_error_prompt or "İşlem sırasında hata oluştu."
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())

    try:
        resp = call_api(api_cfg, vars_)
        # Parse inside the try: a non-JSON body is an API failure too
        # (previously this raised outside the handler → bare 500).
        api_payload = resp.json()
    except Exception as e:
        log(f"❌ API error: {e}")
        reply = intent.fallback_error_prompt or "İşlem sırasında hata oluştu."
        session.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=session.to_dict())

    # 5) Humanize the raw API payload into a user-facing reply.
    human_prompt = build_api_humanize_prompt(
        version.general_prompt,
        api_cfg.response_prompt,
        json.dumps(api_payload, ensure_ascii=False, indent=2),
    )
    reply = ask_llm(human_prompt, "")
    session.add_turn("assistant", reply)
    return ChatResponse(reply=reply, session=session.to_dict())