# api/utils.py
from datetime import datetime
import json
import uuid
from typing import Any, Dict, Optional, AsyncGenerator, List
from fastapi import HTTPException
from api.models import ChatRequest, Message
from api.logger import setup_logger
from api.providers import AmigoChat
logger = setup_logger(__name__)
async def process_streaming_response(request: ChatRequest) -> AsyncGenerator[str, None]:
    """Stream an OpenAI-compatible chat completion from AmigoChat as SSE.

    Yields one ``data: {...}\n\n`` event per content delta produced by
    ``AmigoChat.generate_response``, then a terminal chunk with
    ``finish_reason="stop"``, then the ``data: [DONE]\n\n`` sentinel.

    Args:
        request: Incoming chat request; ``request.model`` and
            ``request.messages`` are forwarded to the provider.

    Raises:
        HTTPException: 500 wrapping any provider error.
            NOTE(review): if the error occurs after the first chunk has
            already been sent, the 500 status can no longer reach the
            client over the open stream — confirm this is acceptable.
    """
    logger.info("Processing streaming response with AmigoChat")
    # Pydantic v1-style serialization; messages go to the provider as dicts.
    messages = [msg.dict() for msg in request.messages]

    def _sse_chunk(delta: Dict[str, Any], finish_reason: Optional[str]) -> str:
        # Build one OpenAI-style chat.completion.chunk and frame it as an
        # SSE "data:" event (shared by content chunks and the stop chunk).
        payload = {
            "id": f"chatcmpl-{uuid.uuid4()}",
            "object": "chat.completion.chunk",
            "created": int(datetime.now().timestamp()),
            "model": request.model,
            "choices": [
                {
                    "index": 0,
                    "delta": delta,
                    "finish_reason": finish_reason,
                }
            ],
        }
        return f"data: {json.dumps(payload)}\n\n"

    try:
        async for content in AmigoChat.generate_response(
            model=request.model,
            messages=messages,
            stream=True,
        ):
            yield _sse_chunk({"content": content}, None)
        # Indicate the end of the stream: empty delta + stop, then sentinel.
        yield _sse_chunk({}, "stop")
        yield "data: [DONE]\n\n"
    except Exception as e:
        logger.error(f"Error in streaming response: {e}")
        raise HTTPException(status_code=500, detail=str(e))