Niansuh committed on
Commit
8104ccc
·
verified ·
1 Parent(s): ddb7871

Delete api/provider/blackboxai.py

Browse files
Files changed (1) hide show
  1. api/provider/blackboxai.py +0 -198
api/provider/blackboxai.py DELETED
@@ -1,198 +0,0 @@
1
- import uuid
2
- from datetime import datetime
3
- import json
4
- from typing import Any, Dict, Optional
5
-
6
- import httpx
7
- from fastapi import HTTPException
8
- from api.models import ChatRequest
9
- from api.config import (
10
- MODEL_MAPPING,
11
- headers,
12
- AGENT_MODE,
13
- TRENDING_AGENT_MODE,
14
- BASE_URL,
15
- MODEL_PREFIXES,
16
- MODEL_REFERERS
17
- )
18
- from api.logger import setup_logger
19
-
20
- logger = setup_logger(__name__)
21
-
22
def create_chat_completion_data(
    content: str, model: str, timestamp: int, finish_reason: Optional[str] = None
) -> Dict[str, Any]:
    """Build one OpenAI-style ``chat.completion.chunk`` payload.

    Args:
        content: Delta text carried by this chunk.
        model: Model name echoed back to the client.
        timestamp: Unix timestamp (seconds) for the ``created`` field.
        finish_reason: ``None`` for intermediate chunks, e.g. ``"stop"`` for
            the terminal chunk.

    Returns:
        A dict matching the OpenAI streaming-chunk schema, with a fresh
        ``chatcmpl-<uuid4>`` id and ``usage`` set to ``None``.
    """
    choice = {
        "index": 0,
        "delta": {"content": content, "role": "assistant"},
        "finish_reason": finish_reason,
    }
    return {
        "id": f"chatcmpl-{uuid.uuid4()}",
        "object": "chat.completion.chunk",
        "created": timestamp,
        "model": model,
        "choices": [choice],
        "usage": None,
    }
39
-
40
def message_to_dict(message, model_prefix: Optional[str] = None):
    """Convert a chat message object into the dict shape the upstream API expects.

    Three cases, mirroring the incoming ``message.content``:
      * plain string  -> ``{"role", "content"}`` with the optional prefix applied;
      * two-item list -> text at ``[0]["text"]`` plus an image at
        ``[1]["image_url"]["url"]``, emitted with a ``data`` payload holding the
        base64 image;
      * anything else -> passed through untouched.

    Args:
        message: Object exposing ``.role`` and ``.content`` attributes.
        model_prefix: Optional prefix prepended to the text as ``"<prefix> <text>"``.
    """
    def _prefixed(text):
        # Empty/None prefix means "leave the text alone".
        return f"{model_prefix} {text}" if model_prefix else text

    content = message.content
    if isinstance(content, str):
        return {"role": message.role, "content": _prefixed(content)}
    if isinstance(content, list) and len(content) == 2:
        # Vision-style message: [text part, image part].
        return {
            "role": message.role,
            "content": _prefixed(content[0]["text"]),
            "data": {
                "imageBase64": content[1]["image_url"]["url"],
                "fileText": "",
                "title": "snapshot",
            },
        }
    # Unknown shape: forward content as-is.
    return {"role": message.role, "content": content}
61
-
62
def strip_model_prefix(content: str, model_prefix: Optional[str] = None) -> str:
    """Remove the model prefix from the response content if present.

    When ``content`` starts with ``model_prefix``, the prefix is dropped and
    surrounding whitespace trimmed; otherwise ``content`` is returned unchanged.
    """
    if not model_prefix or not content.startswith(model_prefix):
        logger.debug("No prefix to strip from content.")
        return content
    logger.debug(f"Stripping prefix '{model_prefix}' from content.")
    return content[len(model_prefix):].strip()
69
-
70
async def process_streaming_response(request: ChatRequest):
    """Stream a chat completion from the upstream ``/api/chat`` endpoint.

    Builds the blackbox.ai request payload from ``request``, POSTs it with a
    model-specific ``Referer`` header, and yields OpenAI-style SSE lines
    (``data: {...}\\n\\n``), followed by a terminal "stop" chunk and
    ``data: [DONE]``.

    Args:
        request: The incoming ChatRequest (model, messages, sampling params).

    Raises:
        HTTPException: with the upstream status on an HTTP error, or 500 on a
            transport failure.
    """
    agent_mode = AGENT_MODE.get(request.model, {})
    trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
    model_prefix = MODEL_PREFIXES.get(request.model, "")
    referer_path = MODEL_REFERERS.get(request.model, f"/?model={request.model}")
    referer_url = f"{BASE_URL}{referer_path}"

    # Update headers with dynamic Referer
    dynamic_headers = headers.copy()
    dynamic_headers['Referer'] = referer_url

    json_data = {
        "messages": [message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages],
        "previewToken": None,
        "userId": None,
        "codeModelMode": True,
        "agentMode": agent_mode,
        "trendingAgentMode": trending_agent_mode,
        "isMicMode": False,
        "userSystemPrompt": None,
        "maxTokens": request.max_tokens,
        "playgroundTopP": request.top_p,
        "playgroundTemperature": request.temperature,
        "isChromeExt": False,
        "githubToken": None,
        "clickedAnswer2": False,
        "clickedAnswer3": False,
        "clickedForceWebSearch": False,
        "visitFromDelta": False,
        "mobileClient": False,
        "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
    }

    async with httpx.AsyncClient() as client:
        try:
            async with client.stream(
                "POST",
                f"{BASE_URL}/api/chat",
                headers=dynamic_headers,
                json=json_data,
                timeout=100,
            ) as response:
                response.raise_for_status()
                # Fix: `timestamp` used to be assigned only inside the loop, so
                # an empty upstream body made the final "stop" chunk below raise
                # UnboundLocalError. Seed it before iterating.
                timestamp = int(datetime.now().timestamp())
                async for line in response.aiter_lines():
                    timestamp = int(datetime.now().timestamp())
                    if line:
                        content = line
                        # Upstream sometimes prepends this 21-char marker; drop it.
                        if content.startswith("$@$v=undefined-rv1$@$"):
                            content = content[21:]
                        # Strip the model prefix from the response content
                        cleaned_content = strip_model_prefix(content, model_prefix)
                        yield f"data: {json.dumps(create_chat_completion_data(cleaned_content, request.model, timestamp))}\n\n"

                yield f"data: {json.dumps(create_chat_completion_data('', request.model, timestamp, 'stop'))}\n\n"
                yield "data: [DONE]\n\n"
        except httpx.HTTPStatusError as e:
            logger.error(f"HTTP error occurred: {e}")
            raise HTTPException(status_code=e.response.status_code, detail=str(e))
        except httpx.RequestError as e:
            logger.error(f"Error occurred during request: {e}")
            raise HTTPException(status_code=500, detail=str(e))
131
-
132
async def process_non_streaming_response(request: ChatRequest):
    """Fetch a complete (non-streaming) chat completion from ``/api/chat``.

    Builds the same blackbox.ai payload as the streaming path, accumulates the
    whole streamed body into one string, strips the upstream marker and model
    prefix, and returns an OpenAI-style ``chat.completion`` response dict.

    Args:
        request: The incoming ChatRequest (model, messages, sampling params).

    Raises:
        HTTPException: with the upstream status on an HTTP error, or 500 on a
            transport failure.
    """
    agent_mode = AGENT_MODE.get(request.model, {})
    trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
    model_prefix = MODEL_PREFIXES.get(request.model, "")
    referer_path = MODEL_REFERERS.get(request.model, f"/?model={request.model}")
    referer_url = f"{BASE_URL}{referer_path}"

    # Update headers with dynamic Referer
    dynamic_headers = headers.copy()
    dynamic_headers['Referer'] = referer_url

    json_data = {
        "messages": [message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages],
        "previewToken": None,
        "userId": None,
        "codeModelMode": True,
        "agentMode": agent_mode,
        "trendingAgentMode": trending_agent_mode,
        "isMicMode": False,
        "userSystemPrompt": None,
        "maxTokens": request.max_tokens,
        "playgroundTopP": request.top_p,
        "playgroundTemperature": request.temperature,
        "isChromeExt": False,
        "githubToken": None,
        "clickedAnswer2": False,
        "clickedAnswer3": False,
        "clickedForceWebSearch": False,
        "visitFromDelta": False,
        "mobileClient": False,
        "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
    }
    full_response = ""
    async with httpx.AsyncClient() as client:
        try:
            async with client.stream(
                method="POST",
                url=f"{BASE_URL}/api/chat",
                headers=dynamic_headers,
                json=json_data,
                # Consistency fix: the streaming path already allows 100s;
                # without this the httpx default timeout applies here.
                timeout=100,
            ) as response:
                response.raise_for_status()
                async for chunk in response.aiter_text():
                    full_response += chunk
        except httpx.HTTPStatusError as e:
            logger.error(f"HTTP error occurred: {e}")
            raise HTTPException(status_code=e.response.status_code, detail=str(e))
        except httpx.RequestError as e:
            logger.error(f"Error occurred during request: {e}")
            raise HTTPException(status_code=500, detail=str(e))
    # Drop the 21-char upstream marker if present.
    if full_response.startswith("$@$v=undefined-rv1$@$"):
        full_response = full_response[21:]

    # Strip the model prefix from the full response
    cleaned_full_response = strip_model_prefix(full_response, model_prefix)

    return {
        "id": f"chatcmpl-{uuid.uuid4()}",
        "object": "chat.completion",
        "created": int(datetime.now().timestamp()),
        "model": request.model,
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": cleaned_full_response},
                "finish_reason": "stop",
            }
        ],
        "usage": None,
    }