Update api_service.py
api_service.py  (+3 −3, CHANGED)
@@ -90,7 +90,7 @@ class ApiService:
 
             if stream:
                 # Streaming response
-                chunk_response = self._create_stream_chunk(chunk_text, request_id, model)
+                chunk_response = self._create_stream_chunk(chunk_text, request_id, openai_request.model)
                 yield f"data: {json.dumps(chunk_response)}\n\n"
             else:
                 # Non-streaming response - wait for the complete content
@@ -102,14 +102,14 @@ class ApiService:
 
         if stream:
             # Send the end-of-stream marker
-            final_chunk = self._create_stream_end_chunk(request_id, model)
+            final_chunk = self._create_stream_end_chunk(request_id, openai_request.model)
             yield f"data: {json.dumps(final_chunk)}\n\n"
             yield "data: [DONE]\n\n"
 
             logger.success(f"✅ Streaming response finished [ID: {request_id[:8]}] [chunks: {response_chunks}] [elapsed: {duration:.2f}s]")
         else:
             # Return the complete response
-            response = self._create_complete_response(total_content, request_id, model)
+            response = self._create_complete_response(total_content, request_id, openai_request.model)
             yield response
 
             logger.success(f"✅ Complete response finished [ID: {request_id[:8]}] [length: {len(total_content)}] [elapsed: {duration:.2f}s]")
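Note on the change: the three helper calls now receive `openai_request.model` instead of a local `model` variable, so the `model` field echoed back in the response matches the exact model string the client sent. The helper bodies are not part of this diff; the sketch below is a hypothetical illustration of the OpenAI-compatible payload shapes such helpers typically build. The function bodies, field choices, and the standalone (non-method) form are assumptions, not this repository's actual implementation.

```python
import json
import time
import uuid

# Hypothetical sketch only: the real _create_stream_chunk / _create_stream_end_chunk /
# _create_complete_response are methods on ApiService and are not shown in the diff.
# These standalone versions just illustrate the assumed OpenAI-compatible payload shape.

def _create_stream_chunk(chunk_text: str, request_id: str, model: str) -> dict:
    """One SSE delta in the (assumed) chat.completion.chunk format."""
    return {
        "id": request_id,
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,  # after this commit: openai_request.model, echoing the client's request
        "choices": [
            {"index": 0, "delta": {"content": chunk_text}, "finish_reason": None}
        ],
    }

def _create_stream_end_chunk(request_id: str, model: str) -> dict:
    """Final chunk with an empty delta and finish_reason set to "stop" (assumed)."""
    return {
        "id": request_id,
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [{"index": 0, "delta": {}, "finish_reason": "stop"}],
    }

def _create_complete_response(total_content: str, request_id: str, model: str) -> dict:
    """Non-streaming chat.completion payload carrying the full text (assumed)."""
    return {
        "id": request_id,
        "object": "chat.completion",
        "created": int(time.time()),
        "model": model,
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": total_content},
                "finish_reason": "stop",
            }
        ],
    }

if __name__ == "__main__":
    # Quick check of the payloads the generator would serialize with json.dumps,
    # mirroring the f"data: ..." lines in the diff above.
    rid = uuid.uuid4().hex
    print(f"data: {json.dumps(_create_stream_chunk('Hello', rid, 'gpt-4o'))}\n")
    print(f"data: {json.dumps(_create_stream_end_chunk(rid, 'gpt-4o'))}\n")
    print("data: [DONE]\n")
```

Under these assumptions, passing `openai_request.model` through keeps the `model` field of every streamed chunk and of the final non-streaming response consistent with the incoming request, which strict OpenAI-compatible clients often validate.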