Niansuh committed (verified)
Commit b63ba8d · 1 Parent(s): 589c428

Update api/utils.py

Files changed (1):
  1. api/utils.py (+71 −40)
api/utils.py CHANGED

@@ -3,6 +3,7 @@ import json
 import uuid
 import asyncio
 import random
+import string
 from typing import Any, Dict, Optional
 
 import httpx
@@ -15,8 +16,9 @@ from api.config import (
     AGENT_MODE,
     TRENDING_AGENT_MODE,
     MODEL_PREFIXES,
+    MODEL_REFERERS
 )
-from api.models import ChatRequest, Message  # Ensure Message is imported
+from api.models import ChatRequest
 from api.logger import setup_logger
 from api.validate import getHid  # Import the asynchronous getHid function
 
@@ -49,28 +51,26 @@ def message_to_dict(message, model_prefix: Optional[str] = None):
     content = message.content if isinstance(message.content, str) else message.content[0]["text"]
     if model_prefix:
         content = f"{model_prefix} {content}"
-    message_dict = {"role": message.role, "content": content}
-
-    # Check if the message includes image data
-    if hasattr(message, 'image_base64') and message.image_base64:
-        image_base64 = message.image_base64
-        # Ensure the base64 image data has the correct data URI prefix
-        if not image_base64.startswith('data:image/'):
-            # Assuming JPEG format; adjust if necessary
-            image_base64 = f"data:image/jpeg;base64,{image_base64}"
-        # Generate a unique file path
-        file_name = f"MultipleFiles/{uuid.uuid4().hex}.jpg"
-        message_dict['data'] = {
-            'imagesData': [
-                {
-                    'filePath': file_name,
-                    'contents': image_base64
-                }
-            ],
-            'fileText': '',
-            'title': ''
+    if isinstance(message.content, list) and len(message.content) == 2 and "image_url" in message.content[1]:
+        # Ensure base64 images are always included for all models
+        image_base64 = message.content[1]["image_url"]["url"]
+        return {
+            "role": message.role,
+            "content": content,
+            "data": {
+                "imageBase64": image_base64,
+                "fileText": "",
+                "title": "snapshot",
+                # Added imagesData field here
+                "imagesData": [
+                    {
+                        "filePath": f"MultipleFiles/{uuid.uuid4().hex}.jpg",
+                        "contents": image_base64
+                    }
+                ],
+            },
         }
-    return message_dict
+    return {"role": message.role, "content": content}
 
 # Function to strip model prefix from content if present
 def strip_model_prefix(content: str, model_prefix: Optional[str] = None) -> str:
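
For context, a minimal, self-contained sketch of what the new message_to_dict returns for an OpenAI-style message that carries an image part. The SimpleNamespace stand-in, the sample data URI and the "@GPT-4o" prefix are illustrative only; the function body mirrors the added lines above.

import uuid
from types import SimpleNamespace
from typing import Optional

def message_to_dict(message, model_prefix: Optional[str] = None):
    # Mirrors the new version above: plain text stays a simple dict,
    # a [text, image_url] pair is expanded into the image payload shape.
    content = message.content if isinstance(message.content, str) else message.content[0]["text"]
    if model_prefix:
        content = f"{model_prefix} {content}"
    if isinstance(message.content, list) and len(message.content) == 2 and "image_url" in message.content[1]:
        image_base64 = message.content[1]["image_url"]["url"]
        return {
            "role": message.role,
            "content": content,
            "data": {
                "imageBase64": image_base64,
                "fileText": "",
                "title": "snapshot",
                "imagesData": [
                    {"filePath": f"MultipleFiles/{uuid.uuid4().hex}.jpg", "contents": image_base64}
                ],
            },
        }
    return {"role": message.role, "content": content}

# Illustrative input only; in the app, api.models.Message provides role/content.
msg = SimpleNamespace(
    role="user",
    content=[
        {"type": "text", "text": "Describe this image."},
        {"type": "image_url", "image_url": {"url": "data:image/jpeg;base64,<base64 bytes>"}},
    ],
)
print(message_to_dict(msg, model_prefix="@GPT-4o"))
# -> {'role': 'user', 'content': '@GPT-4o Describe this image.', 'data': {'imageBase64': ..., 'imagesData': [...]}}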
@@ -80,7 +80,7 @@ def strip_model_prefix(content: str, model_prefix: Optional[str] = None) -> str:
         return content[len(model_prefix):].strip()
     return content
 
-# Process streaming response
+# Process streaming response with headers from config.py
 async def process_streaming_response(request: ChatRequest):
     # Generate a unique ID for this request
     request_id = f"chatcmpl-{uuid.uuid4()}"
@@ -95,14 +95,19 @@ async def process_streaming_response(request: ChatRequest):
 
     if request.model == 'o1-preview':
         delay_seconds = random.randint(1, 60)
-        logger.info(f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview' (Request ID: {request_id})")
+        logger.info(
+            f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview' "
+            f"(Request ID: {request_id})"
+        )
         await asyncio.sleep(delay_seconds)
 
     # Fetch the h-value for the 'validated' field
     h_value = await getHid()
     if not h_value:
         logger.error("Failed to retrieve h-value for validation.")
-        raise HTTPException(status_code=500, detail="Validation failed due to missing h-value.")
+        raise HTTPException(
+            status_code=500, detail="Validation failed due to missing h-value."
+        )
 
     json_data = {
         "agentMode": agent_mode,
@@ -115,7 +120,9 @@ async def process_streaming_response(request: ChatRequest):
         "isChromeExt": False,
         "isMicMode": False,
         "maxTokens": request.max_tokens,
-        "messages": [message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages],
+        "messages": [
+            message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages
+        ],
         "mobileClient": False,
         "playgroundTemperature": request.temperature,
         "playgroundTopP": request.top_p,
@@ -124,7 +131,7 @@ async def process_streaming_response(request: ChatRequest):
         "userId": None,
         "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
         "userSystemPrompt": None,
-        "validated": h_value,
+        "validated": h_value,  # Dynamically set the validated field
        "visitFromDelta": False,
         "webSearchModePrompt": False,
         "imageGenerationMode": False,  # Added this line
@@ -148,7 +155,9 @@ async def process_streaming_response(request: ChatRequest):
                             content = content[21:]
                         # Remove the blocked message if present
                         if BLOCKED_MESSAGE in content:
-                            logger.info(f"Blocked message detected in response for Request ID {request_id}.")
+                            logger.info(
+                                f"Blocked message detected in response for Request ID {request_id}."
+                            )
                             content = content.replace(BLOCKED_MESSAGE, '').strip()
                             if not content:
                                 continue  # Skip if content is empty after removal
@@ -161,10 +170,12 @@ async def process_streaming_response(request: ChatRequest):
             logger.error(f"HTTP error occurred for Request ID {request_id}: {e}")
             raise HTTPException(status_code=e.response.status_code, detail=str(e))
         except httpx.RequestError as e:
-            logger.error(f"Error occurred during request for Request ID {request_id}: {e}")
+            logger.error(
+                f"Error occurred during request for Request ID {request_id}: {e}"
+            )
             raise HTTPException(status_code=500, detail=str(e))
 
-# Process non-streaming response
+# Process non-streaming response with headers from config.py
 async def process_non_streaming_response(request: ChatRequest):
     # Generate a unique ID for this request
     request_id = f"chatcmpl-{uuid.uuid4()}"
@@ -176,18 +187,27 @@ async def process_non_streaming_response(request: ChatRequest):
 
     # Adjust headers_api_chat and headers_chat since referer_url is removed
     headers_api_chat = get_headers_api_chat(BASE_URL)
-    headers_chat = get_headers_chat(BASE_URL, next_action=str(uuid.uuid4()), next_router_state_tree=json.dumps([""]))
+    headers_chat = get_headers_chat(
+        BASE_URL,
+        next_action=str(uuid.uuid4()),
+        next_router_state_tree=json.dumps([""]),
+    )
 
     if request.model == 'o1-preview':
         delay_seconds = random.randint(20, 60)
-        logger.info(f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview' (Request ID: {request_id})")
+        logger.info(
+            f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview' "
+            f"(Request ID: {request_id})"
+        )
         await asyncio.sleep(delay_seconds)
 
     # Fetch the h-value for the 'validated' field
     h_value = await getHid()
     if not h_value:
         logger.error("Failed to retrieve h-value for validation.")
-        raise HTTPException(status_code=500, detail="Validation failed due to missing h-value.")
+        raise HTTPException(
+            status_code=500, detail="Validation failed due to missing h-value."
+        )
 
     json_data = {
         "agentMode": agent_mode,
@@ -200,7 +220,9 @@ async def process_non_streaming_response(request: ChatRequest):
         "isChromeExt": False,
         "isMicMode": False,
         "maxTokens": request.max_tokens,
-        "messages": [message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages],
+        "messages": [
+            message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages
+        ],
         "mobileClient": False,
         "playgroundTemperature": request.temperature,
         "playgroundTopP": request.top_p,
@@ -209,7 +231,7 @@ async def process_non_streaming_response(request: ChatRequest):
         "userId": None,
         "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
         "userSystemPrompt": None,
-        "validated": h_value,
+        "validated": h_value,  # Dynamically set the validated field
         "visitFromDelta": False,
         "webSearchModePrompt": False,
         "imageGenerationMode": False,  # Added this line
@@ -219,7 +241,10 @@ async def process_non_streaming_response(request: ChatRequest):
     async with httpx.AsyncClient() as client:
         try:
             async with client.stream(
-                method="POST", url=f"{BASE_URL}/api/chat", headers=headers_api_chat, json=json_data
+                method="POST",
+                url=f"{BASE_URL}/api/chat",
+                headers=headers_api_chat,
+                json=json_data,
             ) as response:
                 response.raise_for_status()
                 async for chunk in response.aiter_text():
@@ -228,18 +253,24 @@ async def process_non_streaming_response(request: ChatRequest):
             logger.error(f"HTTP error occurred for Request ID {request_id}: {e}")
             raise HTTPException(status_code=e.response.status_code, detail=str(e))
         except httpx.RequestError as e:
-            logger.error(f"Error occurred during request for Request ID {request_id}: {e}")
+            logger.error(
+                f"Error occurred during request for Request ID {request_id}: {e}"
+            )
             raise HTTPException(status_code=500, detail=str(e))
-
+
     if full_response.startswith("$@$v=undefined-rv1$@$"):
         full_response = full_response[21:]
 
     # Remove the blocked message if present
     if BLOCKED_MESSAGE in full_response:
-        logger.info(f"Blocked message detected in response for Request ID {request_id}.")
+        logger.info(
+            f"Blocked message detected in response for Request ID {request_id}."
+        )
         full_response = full_response.replace(BLOCKED_MESSAGE, '').strip()
         if not full_response:
-            raise HTTPException(status_code=500, detail="Blocked message detected in response.")
+            raise HTTPException(
+                status_code=500, detail="Blocked message detected in response."
+            )
 
     cleaned_full_response = strip_model_prefix(full_response, model_prefix)
 
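
The reformatted client.stream(...) call above changes layout only, not behavior. For context, a minimal sketch of the aggregation loop around it in the non-streaming path; the full_response += chunk accumulation is assumed from context (the hunk ends right after aiter_text()), and base_url, headers and payload stand in for BASE_URL, headers_api_chat and json_data built earlier:

import httpx

async def fetch_full_response(base_url: str, headers: dict, payload: dict) -> str:
    # Accumulate the streamed body into one string before the cleanup shown above.
    full_response = ""
    async with httpx.AsyncClient() as client:
        async with client.stream(
            method="POST",
            url=f"{base_url}/api/chat",
            headers=headers,
            json=payload,
        ) as response:
            response.raise_for_status()
            async for chunk in response.aiter_text():
                full_response += chunk
    return full_response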