muryshev commited on
Commit
82b615f
·
1 Parent(s): 9390ea2
components/dbo/alembic/versions/d6124aee7cce_add_chat_id_to_log.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Add chat_id to Log
2
+
3
+ Revision ID: d6124aee7cce
4
+ Revises: 12bb1ebae3ff
5
+ Create Date: 2025-04-18 16:02:17.245022
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = 'd6124aee7cce'
16
+ down_revision: Union[str, None] = '12bb1ebae3ff'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ """Upgrade schema."""
23
+ # ### commands auto generated by Alembic - please adjust! ###
24
+ op.add_column('log', sa.Column('chat_id', sa.String(), nullable=True))
25
+ # ### end Alembic commands ###
26
+
27
+
28
+ def downgrade() -> None:
29
+ """Downgrade schema."""
30
+ # ### commands auto generated by Alembic - please adjust! ###
31
+ op.drop_column('log', 'chat_id')
32
+ # ### end Alembic commands ###
components/dbo/models/log.py CHANGED
@@ -1,8 +1,7 @@
1
  from sqlalchemy import (
2
- Integer,
3
- String,
4
  )
5
- from sqlalchemy.orm import relationship, mapped_column
6
  from components.dbo.models.base import Base
7
 
8
 
@@ -15,4 +14,5 @@ class Log(Base):
15
  llm_result = mapped_column(String)
16
  llm_settings = mapped_column(String)
17
  user_name = mapped_column(String)
18
- error = mapped_column(String)
 
 
1
  from sqlalchemy import (
2
+ String
 
3
  )
4
+ from sqlalchemy.orm import mapped_column
5
  from components.dbo.models.base import Base
6
 
7
 
 
14
  llm_result = mapped_column(String)
15
  llm_settings = mapped_column(String)
16
  user_name = mapped_column(String)
17
+ error = mapped_column(String)
18
+ chat_id = mapped_column(String)
components/llm/common.py CHANGED
@@ -1,4 +1,4 @@
1
- from pydantic import BaseModel, Field
2
  from typing import Optional, List, Protocol
3
 
4
  class LlmPredictParams(BaseModel):
@@ -77,4 +77,5 @@ class Message(BaseModel):
77
  reasoning: Optional[str] = ''
78
 
79
  class ChatRequest(BaseModel):
80
- history: List[Message]
 
 
1
+ from pydantic import UUID4, BaseModel, Field
2
  from typing import Optional, List, Protocol
3
 
4
  class LlmPredictParams(BaseModel):
 
77
  reasoning: Optional[str] = ''
78
 
79
  class ChatRequest(BaseModel):
80
+ history: List[Message]
81
+ chat_id: Optional[str] = None
components/services/log.py CHANGED
@@ -34,11 +34,15 @@ class LogService:
34
  def get_list(self, filters: LogFilterSchema) -> PaginatedLogResponse:
35
  logger.info(f"Fetching logs with filters: {filters.model_dump(exclude_none=True)}")
36
  with self.db() as session:
37
- query = session.query(LogSQL)
38
 
39
  # Применение фильтра по user_name
40
  if filters.user_name:
41
  query = query.filter(LogSQL.user_name == filters.user_name)
 
 
 
 
42
 
43
  # Применение фильтра по диапазону date_created
44
  if filters.date_from:
@@ -46,6 +50,15 @@ class LogService:
46
  if filters.date_to:
47
  query = query.filter(LogSQL.date_created <= filters.date_to)
48
 
 
 
 
 
 
 
 
 
 
49
  total = query.count()
50
 
51
  # Применение пагинации
 
34
  def get_list(self, filters: LogFilterSchema) -> PaginatedLogResponse:
35
  logger.info(f"Fetching logs with filters: {filters.model_dump(exclude_none=True)}")
36
  with self.db() as session:
37
+ query = session.query(LogSQL).order_by(LogSQL.date_created.desc())
38
 
39
  # Применение фильтра по user_name
40
  if filters.user_name:
41
  query = query.filter(LogSQL.user_name == filters.user_name)
42
+
43
+ # Применение фильтра по chat_id (startswith — префиксный поиск)
44
+ if filters.chat_id:
45
+ query = query.filter(LogSQL.chat_id.startswith(filters.chat_id))
46
 
47
  # Применение фильтра по диапазону date_created
48
  if filters.date_from:
 
50
  if filters.date_to:
51
  query = query.filter(LogSQL.date_created <= filters.date_to)
52
 
53
+ # Сортировка
54
+ if filters.sort:
55
+ for sort_param in filters.sort:
56
+ if sort_param.field == "date_created":
57
+ if sort_param.direction == "asc":
58
+ query = query.order_by(LogSQL.date_created.asc())
59
+ elif sort_param.direction == "desc":
60
+ query = query.order_by(LogSQL.date_created.desc())
61
+
62
  total = query.count()
63
 
64
  # Применение пагинации
requirements.txt CHANGED
@@ -3,6 +3,7 @@ fastapi==0.113.0
3
  unicorn==2.0.1.post1
4
  transformers==4.42.4
5
  pandas==2.2.2
 
6
  numpy==1.26.4
7
  tqdm==4.66.5
8
  nltk==3.8.1
 
3
  unicorn==2.0.1.post1
4
  transformers==4.42.4
5
  pandas==2.2.2
6
+ openpyxl==3.1.5
7
  numpy==1.26.4
8
  tqdm==4.66.5
9
  nltk==3.8.1
routes/llm.py CHANGED
@@ -1,7 +1,7 @@
1
  import json
2
  import logging
3
  import os
4
- from typing import Annotated, AsyncGenerator, Optional
5
 
6
  from fastapi import APIRouter, Depends, HTTPException
7
  from fastapi.responses import StreamingResponse
@@ -87,7 +87,7 @@ def try_insert_reasoning(
87
  if msg.role == "user":
88
  msg.reasoning = reasoning
89
 
90
- def collapse_history_to_first_message(chat_request: ChatRequest) -> ChatRequest:
91
  """
92
  Сворачивает историю в первое сообщение и возвращает новый объект ChatRequest.
93
  Формат:
@@ -118,18 +118,18 @@ def collapse_history_to_first_message(chat_request: ChatRequest) -> ChatRequest:
118
  </last-request>
119
  assistant:
120
  """
121
- if not chat_request.history:
122
- return ChatRequest(history=[])
123
 
124
- last_user_message = chat_request.history[-1]
125
- if chat_request.history[-1].role != "user":
126
  logger.warning("Last message is not user message")
127
 
128
 
129
  # Собираем историю в одну строку
130
  collapsed_content = []
131
  collapsed_content.append("<INPUT><history>\n")
132
- for msg in chat_request.history[:-1]:
133
  if msg.content.strip():
134
  tabulated_content = msg.content.strip().replace("\n", "\n\t\t")
135
  collapsed_content.append(f"\t<{msg.role.strip()}>\n\t\t{tabulated_content}\n\t</{msg.role.strip()}>\n")
@@ -160,7 +160,7 @@ def collapse_history_to_first_message(chat_request: ChatRequest) -> ChatRequest:
160
  content=new_content,
161
  searchResults=''
162
  )
163
- return ChatRequest(history=[new_message])
164
 
165
  async def sse_generator(request: ChatRequest, llm_api: DeepInfraApi, system_prompt: str,
166
  predict_params: LlmPredictParams,
@@ -173,7 +173,7 @@ async def sse_generator(request: ChatRequest, llm_api: DeepInfraApi, system_prom
173
  Генератор для стриминга ответа LLM через SSE.
174
  """
175
  # Создаем экземпляр "сквозного" лога через весь процесс
176
- log = LogCreateSchema(user_name=current_user.username)
177
 
178
  try:
179
  old_history = request.history
@@ -259,7 +259,10 @@ async def sse_generator(request: ChatRequest, llm_api: DeepInfraApi, system_prom
259
  log_error = None
260
  try:
261
  # Сворачиваем историю в первое сообщение
262
- collapsed_request = collapse_history_to_first_message(request)
 
 
 
263
 
264
  log.llm_result = ''
265
 
 
1
  import json
2
  import logging
3
  import os
4
+ from typing import Annotated, AsyncGenerator, List, Optional
5
 
6
  from fastapi import APIRouter, Depends, HTTPException
7
  from fastapi.responses import StreamingResponse
 
87
  if msg.role == "user":
88
  msg.reasoning = reasoning
89
 
90
+ def collapse_history_to_first_message(chat_history: List[Message]) -> List[Message]:
91
  """
92
  Сворачивает историю в первое сообщение и возвращает новый список сообщений.
93
  Формат:
 
118
  </last-request>
119
  assistant:
120
  """
121
+ if not chat_history:
122
+ return []
123
 
124
+ last_user_message = chat_history[-1]
125
+ if chat_history[-1].role != "user":
126
  logger.warning("Last message is not user message")
127
 
128
 
129
  # Собираем историю в одну строку
130
  collapsed_content = []
131
  collapsed_content.append("<INPUT><history>\n")
132
+ for msg in chat_history[:-1]:
133
  if msg.content.strip():
134
  tabulated_content = msg.content.strip().replace("\n", "\n\t\t")
135
  collapsed_content.append(f"\t<{msg.role.strip()}>\n\t\t{tabulated_content}\n\t</{msg.role.strip()}>\n")
 
160
  content=new_content,
161
  searchResults=''
162
  )
163
+ return [new_message]
164
 
165
  async def sse_generator(request: ChatRequest, llm_api: DeepInfraApi, system_prompt: str,
166
  predict_params: LlmPredictParams,
 
173
  Генератор для стриминга ответа LLM через SSE.
174
  """
175
  # Создаем экземпляр "сквозного" лога через весь процесс
176
+ log = LogCreateSchema(user_name=current_user.username, chat_id=request.chat_id)
177
 
178
  try:
179
  old_history = request.history
 
259
  log_error = None
260
  try:
261
  # Сворачиваем историю в первое сообщение
262
+ collapsed_request = ChatRequest(
263
+ history=collapse_history_to_first_message(request.history),
264
+ chat_id=request.chat_id
265
+ )
266
 
267
  log.llm_result = ''
268
 
routes/log.py CHANGED
@@ -1,8 +1,11 @@
 
1
  import logging
2
  from datetime import datetime
 
3
  from typing import Annotated, List, Optional
4
 
5
  from fastapi import APIRouter, Depends, HTTPException, Query
 
6
  from pydantic import BaseModel
7
 
8
  from common import auth
@@ -10,6 +13,7 @@ from common.common import configure_logging
10
  from components.services.log import LogService
11
  from schemas.log import LogCreateSchema, LogFilterSchema, LogSchema, PaginatedLogResponse
12
  import common.dependencies as DI
 
13
 
14
  router = APIRouter(tags=['Logs'])
15
 
@@ -22,6 +26,7 @@ async def get_all_logs(
22
  log_service: Annotated[LogService, Depends(DI.get_log_service)],
23
  current_user: Annotated[any, Depends(auth.get_current_user)]
24
  ):
 
25
  logger.info(f'GET /logs')
26
 
27
  try:
@@ -29,4 +34,52 @@ async def get_all_logs(
29
  except HTTPException as e:
30
  raise e
31
  except Exception as e:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  raise HTTPException(status_code=500, detail=str(e))
 
1
+ from io import BytesIO
2
  import logging
3
  from datetime import datetime
4
+ import sys
5
  from typing import Annotated, List, Optional
6
 
7
  from fastapi import APIRouter, Depends, HTTPException, Query
8
+ from fastapi.responses import StreamingResponse
9
  from pydantic import BaseModel
10
 
11
  from common import auth
 
13
  from components.services.log import LogService
14
  from schemas.log import LogCreateSchema, LogFilterSchema, LogSchema, PaginatedLogResponse
15
  import common.dependencies as DI
16
+ import pandas as pd
17
 
18
  router = APIRouter(tags=['Logs'])
19
 
 
26
  log_service: Annotated[LogService, Depends(DI.get_log_service)],
27
  current_user: Annotated[any, Depends(auth.get_current_user)]
28
  ):
29
+ logger.info(f"Fetching logsываыва with filters: {filters.model_dump(exclude_none=True)}")
30
  logger.info(f'GET /logs')
31
 
32
  try:
 
34
  except HTTPException as e:
35
  raise e
36
  except Exception as e:
37
+ raise HTTPException(status_code=500, detail=str(e))
38
+
39
@router.get('/logs/excel')
async def get_all_logs_excel(
    filters: LogFilterSchema = Depends(),
    log_service: LogService = Depends(DI.get_log_service),
    current_user: any = Depends(auth.get_current_user)
):
    """Export every log record matching the filters as an Excel (.xlsx) download.

    Pagination from the incoming filters is overridden so the export covers
    all matching rows, not just one page.
    """
    logger.info(f'GET /logs/excel with filters: {filters.model_dump(exclude_none=True)}')

    try:
        # Получаем логи без пагинации (все записи по фильтру)
        filters.page = 1
        filters.page_size = sys.maxsize
        logs_response = log_service.get_list(filters)

        logs_data = [
            {
                'ID': log.id,
                'Date Created': log.date_created,
                'User Name': log.user_name or '',
                'Chat ID': log.chat_id or '',
                'User Request': log.user_request or '',
                'QE Result': log.qe_result or '',
                'Search Result': log.search_result or '',
                'LLM Result': log.llm_result or ''
            }
            for log in logs_response.data
        ]

        df = pd.DataFrame(logs_data)

        # Write the workbook into an in-memory buffer; openpyxl is the
        # engine pinned in requirements.txt.
        output = BytesIO()
        with pd.ExcelWriter(output, engine='openpyxl') as writer:
            df.to_excel(writer, index=False, sheet_name='Logs')

        # Content-Type is supplied once via media_type below; duplicating it
        # in headers previously produced a redundant header.
        headers = {
            'Content-Disposition': 'attachment; filename="logs.xlsx"',
        }

        output.seek(0)
        return StreamingResponse(
            output,
            headers=headers,
            media_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        )

    except HTTPException:
        # Deliberate HTTP errors pass through unchanged (bare raise keeps
        # the original traceback intact).
        raise
    except Exception as e:
        # logger.exception records the stack trace, unlike logger.error.
        logger.exception('Error generating Excel')
        raise HTTPException(status_code=500, detail=str(e))
schemas/chat.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from typing import List, Optional
from pydantic import BaseModel

from components.llm.common import ChatRequest, Message


class MessageSchema(BaseModel):
    """API-facing schema for a single chat message."""
    role: str
    content: str
    searchResults: Optional[str] = ''
    searchEntities: Optional[List[str]] = []
    reasoning: Optional[str] = ''

    def to_bl(self) -> Message:
        """Convert to the business-layer Message model.

        Bug fix: previously validated against ChatRequest, which does not
        match the declared return type (a single message is not a request).
        """
        return Message.model_validate(self.model_dump())


class ChatRequestSchema(BaseModel):
    """API-facing schema for a chat request: message history plus chat id."""
    history: List[MessageSchema]
    # Default None makes the field truly optional in request bodies
    # (pydantic v2 treats Optional without a default as required).
    chat_id: Optional[str] = None

    def to_bl(self) -> ChatRequest:
        """Convert to the business-layer ChatRequest.

        NOTE(review): chat_id is excluded here, so the BL object is built
        from history only — confirm callers pass chat_id separately.
        """
        return ChatRequest.model_validate(self.model_dump(exclude={"chat_id"}))

    @classmethod
    def from_bl(cls, bl: ChatRequest, chat_id: Optional[str] = None) -> "ChatRequestSchema":
        """Build a schema instance from a business-layer ChatRequest."""
        return cls.model_validate({
            "history": [msg.model_dump() for msg in bl.history],
            "chat_id": chat_id
        })
schemas/log.py CHANGED
@@ -1,7 +1,7 @@
1
  from datetime import datetime
2
  from typing import List, Optional
3
 
4
- from pydantic import BaseModel
5
 
6
 
7
  class LogSchema(BaseModel):
@@ -13,6 +13,7 @@ class LogSchema(BaseModel):
13
  llm_result: Optional[str] = None
14
  llm_settings: Optional[str] = None
15
  user_name: Optional[str] = None
 
16
  error: Optional[str] = None
17
 
18
  class LogCreateSchema(BaseModel):
@@ -23,14 +24,21 @@ class LogCreateSchema(BaseModel):
23
  llm_settings: Optional[str] = None
24
  user_name: Optional[str] = None
25
  error: Optional[str] = None
 
26
 
 
 
 
 
27
  class LogFilterSchema(BaseModel):
28
  user_name: Optional[str] = None
 
29
  date_from: Optional[datetime] = None
30
  date_to: Optional[datetime] = None
31
 
32
  page: int = 1 # Номер страницы, по умолчанию 1
33
  page_size: int = 50 # Размер страницы, по умолчанию 50
 
34
 
35
  class Config:
36
  json_schema_extra = {
@@ -39,7 +47,10 @@ class LogFilterSchema(BaseModel):
39
  "date_from": "2024-01-01T00:00:00",
40
  "date_to": "2026-12-31T23:59:59",
41
  "page": 1,
42
- "page_size": 50
 
 
 
43
  }
44
  }
45
 
@@ -48,4 +59,7 @@ class PaginatedLogResponse(BaseModel):
48
  total: int
49
  page: int
50
  page_size: int
51
- total_pages: int
 
 
 
 
1
  from datetime import datetime
2
  from typing import List, Optional
3
 
4
+ from pydantic import UUID4, BaseModel
5
 
6
 
7
  class LogSchema(BaseModel):
 
13
  llm_result: Optional[str] = None
14
  llm_settings: Optional[str] = None
15
  user_name: Optional[str] = None
16
+ chat_id: Optional[str] = None
17
  error: Optional[str] = None
18
 
19
  class LogCreateSchema(BaseModel):
 
24
  llm_settings: Optional[str] = None
25
  user_name: Optional[str] = None
26
  error: Optional[str] = None
27
+ chat_id: Optional[str] = None
28
 
29
class SortParam(BaseModel):
    """One sort instruction: the field to sort by and the direction."""
    field: str
    direction: str  # "asc" | "desc"
32
+
33
  class LogFilterSchema(BaseModel):
34
  user_name: Optional[str] = None
35
+ chat_id: Optional[str] = None
36
  date_from: Optional[datetime] = None
37
  date_to: Optional[datetime] = None
38
 
39
  page: int = 1 # Номер страницы, по умолчанию 1
40
  page_size: int = 50 # Размер страницы, по умолчанию 50
41
+ sort: Optional[List[SortParam]] = None # Список параметров сортировки
42
 
43
  class Config:
44
  json_schema_extra = {
 
47
  "date_from": "2024-01-01T00:00:00",
48
  "date_to": "2026-12-31T23:59:59",
49
  "page": 1,
50
+ "page_size": 50,
51
+ "sort": [
52
+ {"field": "date_created", "direction": "desc"}
53
+ ]
54
  }
55
  }
56
 
 
59
  total: int
60
  page: int
61
  page_size: int
62
+ total_pages: int
63
+
64
+
65
+