# src/models/chat.py
"""Pydantic request/response models for the chat, feedback, and summarize endpoints."""

from pydantic import BaseModel
from typing import Optional, List, Dict
from datetime import datetime  # NOTE(review): not referenced in this module — confirm consumers don't import it from here before removing

from .base import ChatMetadata


class ChatRequest(BaseModel):
    """Request payload for the chat endpoint."""

    query: str                             # user's question / prompt text
    llm_provider: str = 'openai'           # which LLM backend to route the request to
    max_context_docs: int = 3              # upper bound on retrieved context documents
    temperature: float = 0.7               # sampling temperature forwarded to the LLM
    stream: bool = False                   # request a streamed response when True
    conversation_id: Optional[str] = None  # continue an existing conversation when set


class ChatResponse(ChatMetadata):
    """Response payload for the chat endpoint.

    Extends ChatMetadata (defined in .base) with the generated answer and
    optional retrieval details.
    """

    response: str                                      # the generated answer text
    context: Optional[List[str]] = None                # raw context snippets used, if returned
    sources: Optional[List[Dict[str, str]]] = None     # source descriptors for cited documents
    relevant_doc_scores: Optional[List[float]] = None  # relevance scores aligned with retrieved docs


class FeedbackRequest(BaseModel):
    """Request payload for the feedback endpoint."""

    rating: int                      # numeric rating; range not enforced here — validated upstream, presumably
    feedback: Optional[str] = None   # optional free-text comment


class SummarizeRequest(BaseModel):
    """Request payload for the summarize endpoint."""

    conversation_id: str             # conversation to summarize
    include_metadata: bool = True    # attach conversation metadata to the summary when True


class SummaryResponse(BaseModel):
    """Response payload for the summarize endpoint."""

    summary: str                     # generated summary text
    key_insights: Dict               # structured insights; schema intentionally open-ended
    metadata: Optional[Dict] = None  # present only when requested via include_metadata