File size: 2,887 Bytes
c8aa98a
 
 
 
 
 
 
261bb88
c8aa98a
ddb7871
c8aa98a
ddb7871
 
5f8de6f
6a8c132
c8aa98a
 
 
 
79d80a7
6a8c132
 
 
 
 
 
c8aa98a
 
6a8c132
c8aa98a
 
 
 
6a8c132
 
c8aa98a
 
1eae6af
6a8c132
c8aa98a
6a8c132
 
c8aa98a
6a8c132
c8aa98a
 
 
 
 
6a8c132
c8aa98a
 
 
 
6a8c132
 
c8aa98a
 
6a8c132
c8aa98a
79d80a7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# api/utils.py

from datetime import datetime
import json
import uuid
from typing import Any, Dict, Optional, AsyncGenerator, List

from fastapi import HTTPException
from api.models import ChatRequest, Message
from api.logger import setup_logger
from api.providers import AmigoChat

logger = setup_logger(__name__)

async def process_streaming_response(request: ChatRequest) -> AsyncGenerator[str, None]:
    """Stream an OpenAI-compatible chat completion from AmigoChat as SSE events.

    Yields one ``data: {json}\n\n`` chunk per piece of provider content, then a
    terminal chunk with ``finish_reason="stop"``, then the ``data: [DONE]``
    sentinel expected by OpenAI-style streaming clients.

    Args:
        request: Parsed chat request carrying ``model`` and ``messages``.

    Raises:
        HTTPException: 500 with the provider error as detail.
            NOTE(review): if any chunk has already been yielded, the response
            status can no longer change — the client only sees a truncated
            stream. Confirm whether an in-band error event is wanted instead.
    """
    logger.info("Processing streaming response with AmigoChat")
    messages = [msg.dict() for msg in request.messages]

    def _sse_chunk(delta: Dict[str, Any], finish_reason: Optional[str]) -> str:
        # Every SSE event is a fresh chunk object with its own id/timestamp,
        # matching the original per-chunk construction.
        payload = {
            "id": f"chatcmpl-{uuid.uuid4()}",
            "object": "chat.completion.chunk",
            "created": int(datetime.now().timestamp()),
            "model": request.model,
            "choices": [
                {
                    "index": 0,
                    "delta": delta,
                    "finish_reason": finish_reason,
                }
            ],
        }
        return f"data: {json.dumps(payload)}\n\n"

    try:
        async for content in await AmigoChat.generate_response(
            model=request.model,
            messages=messages,
            stream=True
        ):
            yield _sse_chunk({"content": content}, None)

        # Indicate the end of the stream: empty delta with finish_reason="stop",
        # followed by the SSE termination sentinel.
        yield _sse_chunk({}, "stop")
        yield "data: [DONE]\n\n"

    except Exception as e:
        logger.error(f"Error in streaming response: {e}")
        raise HTTPException(status_code=500, detail=str(e))

async def process_non_streaming_response(request: ChatRequest):
    """Return one complete OpenAI-style chat completion dict from AmigoChat.

    Args:
        request: Parsed chat request carrying ``model`` and ``messages``.

    Raises:
        HTTPException: 500 with the provider error as detail.
    """
    logger.info("Processing non-streaming response with AmigoChat")
    message_dicts = [message.dict() for message in request.messages]

    try:
        reply = await AmigoChat.generate_response(
            model=request.model,
            messages=message_dicts,
            stream=False
        )
    except Exception as exc:
        logger.error(f"Error in non-streaming response: {exc}")
        raise HTTPException(status_code=500, detail=str(exc))

    # Shape mirrors the OpenAI chat completion object; usage is not tracked.
    return {
        "id": f"chatcmpl-{uuid.uuid4()}",
        "object": "chat.completion",
        "created": int(datetime.now().timestamp()),
        "model": request.model,
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": reply},
                "finish_reason": "stop",
            }
        ],
        "usage": None,
    }