File size: 3,007 Bytes
82b615f
57cf043
 
82b615f
9390ea2
57cf043
9390ea2
82b615f
9390ea2
57cf043
fd3c8b9
57cf043
9390ea2
 
57cf043
82b615f
57cf043
fd3c8b9
57cf043
 
 
9390ea2
 
57cf043
9390ea2
 
 
57cf043
82b615f
9390ea2
57cf043
 
9390ea2
 
57cf043
9390ea2
82b615f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9390ea2
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
import logging
import sys
from datetime import datetime
from io import BytesIO
from typing import Annotated, Any, List, Optional

import pandas as pd
from fastapi import APIRouter, Depends, HTTPException, Query
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

import common.dependencies as DI
from common import auth
from common.common import configure_logging
from components.services.log import LogService
from schemas.log import LogCreateSchema, LogFilterSchema, LogSchema, PaginatedLogResponse

# Router for the log endpoints; the 'Logs' tag groups them in the OpenAPI docs.
router = APIRouter(tags=['Logs'])

# Module-level logger; configure_logging() applies the project-wide logging
# setup from common.common (handlers/format — see that module for details).
logger = logging.getLogger(__name__)
configure_logging()
    
@router.get('/logs', response_model=PaginatedLogResponse)
async def get_all_logs(
    filters: Annotated[LogFilterSchema, Depends()],
    log_service: Annotated[LogService, Depends(DI.get_log_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)]
):
    """Return a paginated list of logs matching *filters*.

    Args:
        filters: Query-string filter/pagination parameters.
        log_service: Injected service that performs the actual lookup.
        current_user: Authenticated user (required; value itself unused here).

    Raises:
        HTTPException: propagated from the service unchanged, or a 500
            wrapping any unexpected error.
    """
    # Fix: original message contained garbled keyboard-mash text
    # ("Fetching logsываыва...") plus a redundant second info line.
    # Lazy %-args avoid building the dump when INFO is disabled.
    logger.info('GET /logs with filters: %s', filters.model_dump(exclude_none=True))

    try:
        return log_service.get_list(filters)
    except HTTPException:
        # Re-raise as-is to preserve the service's status code and traceback.
        raise
    except Exception as e:
        # Log the full traceback before collapsing into a generic 500.
        logger.exception('Error fetching logs')
        raise HTTPException(status_code=500, detail=str(e))
    
@router.get('/logs/excel')
async def get_all_logs_excel(
    filters: LogFilterSchema = Depends(),
    log_service: LogService = Depends(DI.get_log_service),
    current_user: Any = Depends(auth.get_current_user)
):
    """Export all logs matching *filters* as a streamed .xlsx download.

    Pagination in *filters* is overridden so that every matching record is
    included in the export, not just the current page.

    Args:
        filters: Query-string filter parameters (page/page_size ignored).
        log_service: Injected service that performs the lookup.
        current_user: Authenticated user (required; value itself unused here).

    Returns:
        StreamingResponse with an Excel workbook ('Logs' sheet) as attachment.

    Raises:
        HTTPException: propagated from the service unchanged, or a 500
            wrapping any unexpected error.
    """
    logger.info('GET /logs/excel with filters: %s', filters.model_dump(exclude_none=True))

    try:
        # Fetch logs without pagination: all records matching the filter.
        filters.page = 1
        filters.page_size = sys.maxsize
        logs_response = log_service.get_list(filters)

        logs_data = [
            {
                'ID': log.id,
                'Date Created': log.date_created,
                'User Name': log.user_name or '',
                'Chat ID': log.chat_id or '',
                'User Request': log.user_request or '',
                'QE Result': log.qe_result or '',
                'Search Result': log.search_result or '',
                'LLM Result': log.llm_result or ''
            }
            for log in logs_response.data
        ]

        df = pd.DataFrame(logs_data)

        # Build the workbook fully in memory; the context manager finalizes
        # the xlsx structure on exit, so seek(0) must come after the `with`.
        output = BytesIO()
        with pd.ExcelWriter(output, engine='openpyxl') as writer:
            df.to_excel(writer, index=False, sheet_name='Logs')

        headers = {
            'Content-Disposition': 'attachment; filename="logs.xlsx"',
            'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        }

        output.seek(0)
        return StreamingResponse(output, headers=headers, media_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')

    except HTTPException:
        # Re-raise as-is to preserve the service's status code and traceback.
        raise
    except Exception as e:
        # Fix: logger.exception (vs logger.error) records the traceback.
        logger.exception('Error generating Excel')
        raise HTTPException(status_code=500, detail=str(e))