Niansuh committed on
Commit
c8aa98a
·
verified ·
1 Parent(s): ba592f1

Update api/utils.py

Browse files
Files changed (1) hide show
  1. api/utils.py +88 -15
api/utils.py CHANGED
@@ -1,20 +1,93 @@
 
 
 
 
 
 
 
1
  from fastapi import HTTPException
2
- from api.config import MODEL_PROVIDER_MAPPING
3
- from api.provider import amigochat
4
  from api.logger import setup_logger
 
5
 
6
  logger = setup_logger(__name__)
7
 
8
- async def process_streaming_response(request_data):
9
- provider_name = MODEL_PROVIDER_MAPPING.get(request_data.get('model'))
10
- if provider_name == 'amigochat':
11
- return await amigochat.process_streaming_response(request_data)
12
- else:
13
- raise HTTPException(status_code=400, detail=f"Model {request_data.get('model')} is not supported for streaming.")
14
-
15
- async def process_non_streaming_response(request_data):
16
- provider_name = MODEL_PROVIDER_MAPPING.get(request_data.get('model'))
17
- if provider_name == 'amigochat':
18
- return await amigochat.process_non_streaming_response(request_data)
19
- else:
20
- raise HTTPException(status_code=400, detail=f"Model {request_data.get('model')} is not supported.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # api/utils.py
2
+
3
+ from datetime import datetime
4
+ import json
5
+ import uuid
6
+ from typing import Any, Dict, Optional, AsyncGenerator, List
7
+
8
  from fastapi import HTTPException
9
+ from api.models import ChatRequest, Message
 
10
  from api.logger import setup_logger
11
+ from api.providers import AmigoChat
12
 
13
  logger = setup_logger(__name__)
14
 
15
async def process_streaming_response(request: ChatRequest):
    """Stream an OpenAI-style ``chat.completion.chunk`` sequence for *request*.

    Delegates generation to ``AmigoChat.generate_response`` and wraps each
    content fragment in a newline-delimited JSON chunk; after the provider
    is exhausted, yields a final chunk with ``finish_reason == "stop"``.

    Args:
        request: Incoming chat request carrying ``model`` and ``messages``.

    Raises:
        HTTPException: 500 wrapping the provider error message.
            NOTE(review): chunks already yielded cannot be un-sent, so the
            500 only reaches the client if the failure happens before the
            first chunk is produced.
    """
    logger.info("Processing streaming response with AmigoChat")
    messages = [msg.dict() for msg in request.messages]

    # One completion id shared by every chunk of this stream: the OpenAI
    # streaming contract is that all chunks of a single completion carry
    # the same "id" (the original code minted a new uuid per chunk).
    completion_id = f"chatcmpl-{uuid.uuid4()}"

    def _chunk(delta: Dict[str, Any], finish_reason: Optional[str]) -> str:
        # Serialize one chunk; the trailing "\n" delimits chunks on the wire.
        return json.dumps({
            "id": completion_id,
            "object": "chat.completion.chunk",
            "created": int(datetime.now().timestamp()),
            "model": request.model,
            "choices": [
                {
                    "index": 0,
                    "delta": delta,
                    "finish_reason": finish_reason,
                }
            ],
            "usage": None,
        }) + "\n"

    try:
        async for content in AmigoChat.generate_response(
            model=request.model,
            messages=messages,
            stream=True
        ):
            yield _chunk({"content": content}, None)

        # Indicate the end of the stream
        yield _chunk({}, "stop")

    except Exception as e:
        logger.error(f"Error in streaming response: {e}")
        raise HTTPException(status_code=500, detail=str(e))
60
+
61
async def process_non_streaming_response(request: ChatRequest):
    """Return the complete AmigoChat reply for *request* as a single
    OpenAI-style ``chat.completion`` payload.

    The provider is driven in non-streaming mode; every fragment it emits
    is accumulated and joined into one assistant message.

    Args:
        request: Incoming chat request carrying ``model`` and ``messages``.

    Raises:
        HTTPException: 500 wrapping any provider error.
    """
    logger.info("Processing non-streaming response with AmigoChat")
    payload_messages = [message.dict() for message in request.messages]

    try:
        fragments = []
        generator = AmigoChat.generate_response(
            model=request.model,
            messages=payload_messages,
            stream=False
        )
        async for fragment in generator:
            fragments.append(fragment)

        assistant_text = ''.join(fragments)

        return {
            "id": f"chatcmpl-{uuid.uuid4()}",
            "object": "chat.completion",
            "created": int(datetime.now().timestamp()),
            "model": request.model,
            "choices": [
                {
                    "index": 0,
                    "message": {"role": "assistant", "content": assistant_text},
                    "finish_reason": "stop",
                }
            ],
            "usage": None,
        }

    except Exception as e:
        logger.error(f"Error in non-streaming response: {e}")
        raise HTTPException(status_code=500, detail=str(e))