ciyidogan committed (verified)
Commit 581208a · 1 Parent(s): 2605800

Update chat_handler.py

Files changed (1):
  1. chat_handler.py +34 -8
chat_handler.py CHANGED
@@ -203,7 +203,7 @@ class ChatRequest(BaseModel):
 
 class StartRequest(BaseModel):
     project_name: str
-    version_number: Optional[int] = None  # Optional; if not given, use the highest published version no
+    version_no: Optional[int] = None  # Optional; if not given, use the highest published version no
 
 class ChatResponse(BaseModel):
     session_id: str
@@ -213,6 +213,8 @@ class ChatResponse(BaseModel):
 @router.post("/start_session", response_model=ChatResponse)
 async def start_session(req: StartRequest):
     """Create new session"""
+    global llm_provider
+
     try:
         # Validate project exists
         project = next((p for p in cfg.projects if p.name == req.project_name and p.enabled), None)
@@ -220,26 +222,50 @@ async def start_session(req: StartRequest):
             raise HTTPException(404, f"Project '{req.project_name}' not found or disabled")
 
         # Find version
-        if req.version_number:
+        if req.version_no:
             # Specific version requested
-            version = next((v for v in project.versions if v.id == req.version_number), None)
+            version = next((v for v in project.versions if v.no == req.version_no), None)
             if not version:
-                raise HTTPException(404, f"Version {req.version_number} not found for project '{req.project_name}'")
+                raise HTTPException(404, f"Version {req.version_no} not found for project '{req.project_name}'")
         else:
             # Find published version with highest version number
             published_versions = [v for v in project.versions if v.published]
             if not published_versions:
                 raise HTTPException(404, f"No published version for project '{req.project_name}'")
 
-            # Sort by version number (id) and get the highest
-            version = max(published_versions, key=lambda v: v.id)
+            # Sort by version number (no) and get the highest
+            version = max(published_versions, key=lambda v: v.no)
+
+        # Create LLM provider if not exists
+        if not llm_provider:
+            from llm_factory import LLMFactory
+            llm_provider = LLMFactory.create_provider()
+            log(f"🤖 LLM Provider created: {type(llm_provider).__name__}")
 
         # Create session with version config
-        session = session_store.create_session(req.project_name, version)
+        session_id = session_store.create(req.project_name, version)
+        session = session_store.get(session_id)
+
+        # Process the welcome prompt
         greeting = "Hoş geldiniz! Size nasıl yardımcı olabilirim?"
+        if version.welcome_prompt:
+            log(f"🎉 Processing welcome prompt for session {session_id[:8]}...")
+            try:
+                # Send the welcome prompt to the LLM
+                welcome_result = await llm_provider.generate(
+                    prompt=version.welcome_prompt,
+                    max_tokens=200,
+                    temperature=0.7
+                )
+                if welcome_result and welcome_result.strip():
+                    greeting = welcome_result.strip()
+            except Exception as e:
+                log(f"⚠️ Welcome prompt processing failed: {e}")
+                # Fallback to default greeting
+
         session.add_turn("assistant", greeting)
 
-        log(f"✅ Session created for project '{req.project_name}' version {version.id} (highest published)")
+        log(f"✅ Session created for project '{req.project_name}' version {version.no} (highest published)")
 
         return ChatResponse(session_id=session.session_id, answer=greeting)
 
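
For reference, a client call against the updated endpoint could look like the minimal sketch below. Only the /start_session path, the project_name and version_no request fields, and the session_id and answer response fields come from the diff; the host, port, missing router prefix, and project name are assumptions for illustration.

# Hypothetical client for the updated /start_session route.
# BASE_URL and "demo_project" are placeholders, not taken from the commit.
import requests

BASE_URL = "http://localhost:7860"  # assumed deployment address

# Omit version_no: the server resolves the highest published version.
resp = requests.post(f"{BASE_URL}/start_session", json={"project_name": "demo_project"})
resp.raise_for_status()
data = resp.json()
print(data["session_id"], data["answer"])

# Pin a specific version via the renamed version_no field.
resp = requests.post(
    f"{BASE_URL}/start_session",
    json={"project_name": "demo_project", "version_no": 2},
)
print(resp.json()["answer"])

When version_no is omitted, the handler above falls back to the published version with the highest no; when it is supplied but does not exist, the call returns a 404.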