Niansuh committed on
Commit
5640f9d
·
verified ·
1 Parent(s): 4d91e65

Update api/utils.py

Browse files
Files changed (1) hide show
  1. api/utils.py +76 -82
api/utils.py CHANGED
@@ -3,6 +3,7 @@ import json
3
  import uuid
4
  import asyncio
5
  import random
 
6
  from typing import Any, Dict, Optional
7
 
8
  import httpx
@@ -15,6 +16,7 @@ from api.config import (
15
  AGENT_MODE,
16
  TRENDING_AGENT_MODE,
17
  MODEL_PREFIXES,
 
18
  )
19
  from api.models import ChatRequest
20
  from api.logger import setup_logger
@@ -70,18 +72,38 @@ def strip_model_prefix(content: str, model_prefix: Optional[str] = None) -> str:
70
  return content[len(model_prefix):].strip()
71
  return content
72
 
73
- # Helper function to build JSON data for the request
74
- def build_json_data(request: ChatRequest, h_value: str, model_prefix: Optional[str]):
 
 
 
 
75
  agent_mode = AGENT_MODE.get(request.model, {})
76
  trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
77
- return {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
78
  "agentMode": agent_mode,
79
  "clickedAnswer2": False,
80
  "clickedAnswer3": False,
81
  "clickedForceWebSearch": False,
82
  "codeModelMode": True,
83
  "githubToken": None,
84
- "id": None,
85
  "isChromeExt": False,
86
  "isMicMode": False,
87
  "maxTokens": request.max_tokens,
@@ -94,35 +116,10 @@ def build_json_data(request: ChatRequest, h_value: str, model_prefix: Optional[s
94
  "userId": None,
95
  "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
96
  "userSystemPrompt": None,
97
- "validated": "00f37b34-a166-4efb-bce5-1312d87f2f94",
98
  "visitFromDelta": False,
99
  }
100
 
101
- # Process streaming response with headers from config.py
102
- async def process_streaming_response(request: ChatRequest):
103
- logger.info(f"Processing request - Model: {request.model}")
104
-
105
- model_prefix = MODEL_PREFIXES.get(request.model, "")
106
-
107
- # Adjust headers_api_chat since referer_url is removed
108
- headers_api_chat = get_headers_api_chat(BASE_URL)
109
-
110
- if request.model == 'o1-preview':
111
- delay_seconds = random.randint(1, 60)
112
- logger.info(f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview'")
113
- await asyncio.sleep(delay_seconds)
114
-
115
- # Fetch the h-value for the 'validated' field
116
- h_value = await getHid()
117
- if not h_value:
118
- logger.error("Failed to retrieve h-value for validation.")
119
- raise HTTPException(status_code=500, detail="Validation failed due to missing h-value.")
120
-
121
- json_data = build_json_data(request, h_value, model_prefix)
122
-
123
- # Initialize rolling buffer to handle BLOCKED_MESSAGE split across chunks
124
- rolling_buffer = ""
125
-
126
  async with httpx.AsyncClient() as client:
127
  try:
128
  async with client.stream(
@@ -134,68 +131,46 @@ async def process_streaming_response(request: ChatRequest):
134
  ) as response:
135
  response.raise_for_status()
136
  async for chunk in response.aiter_text():
 
137
  if chunk:
138
- # Combine rolling buffer with current chunk
139
- combined_chunk = rolling_buffer + chunk
140
-
141
- # Remove any occurrence of BLOCKED_MESSAGE in combined_chunk
142
- if BLOCKED_MESSAGE in combined_chunk:
143
- logger.info("Blocked message detected in response.")
144
- combined_chunk = combined_chunk.replace(BLOCKED_MESSAGE, '')
145
-
146
- # Remove model prefix if present
147
- cleaned_content = strip_model_prefix(combined_chunk, model_prefix)
148
-
149
- # Yield the cleaned content
150
- timestamp = int(datetime.now().timestamp())
151
  yield f"data: {json.dumps(create_chat_completion_data(cleaned_content, request.model, timestamp))}\n\n"
152
 
153
- # Update rolling buffer with the end of the combined_chunk
154
- # Keep only the last len(BLOCKED_MESSAGE) - 1 characters
155
- rolling_buffer = combined_chunk[-(len(BLOCKED_MESSAGE) - 1):]
156
- else:
157
- # If chunk is empty, reset rolling buffer
158
- rolling_buffer = ""
159
-
160
- # After streaming is done, check if any remaining content is in the rolling buffer
161
- if rolling_buffer:
162
- # Remove any occurrence of BLOCKED_MESSAGE in rolling buffer
163
- if BLOCKED_MESSAGE in rolling_buffer:
164
- logger.info("Blocked message detected in remaining buffer.")
165
- rolling_buffer = rolling_buffer.replace(BLOCKED_MESSAGE, '')
166
-
167
- cleaned_content = strip_model_prefix(rolling_buffer, model_prefix)
168
- timestamp = int(datetime.now().timestamp())
169
- yield f"data: {json.dumps(create_chat_completion_data(cleaned_content, request.model, timestamp))}\n\n"
170
-
171
- # Signal the end of the streaming
172
- timestamp = int(datetime.now().timestamp())
173
  yield f"data: {json.dumps(create_chat_completion_data('', request.model, timestamp, 'stop'))}\n\n"
174
  yield "data: [DONE]\n\n"
175
  except httpx.HTTPStatusError as e:
176
- logger.error(f"HTTP error occurred: {e}")
177
  raise HTTPException(status_code=e.response.status_code, detail=str(e))
178
  except httpx.RequestError as e:
179
- logger.error(f"Error occurred during request: {e}")
180
  raise HTTPException(status_code=500, detail=str(e))
181
 
182
  # Process non-streaming response with headers from config.py
183
  async def process_non_streaming_response(request: ChatRequest):
184
- logger.info(f"Processing request - Model: {request.model}")
 
 
185
 
 
 
186
  model_prefix = MODEL_PREFIXES.get(request.model, "")
187
 
188
  # Adjust headers_api_chat and headers_chat since referer_url is removed
189
  headers_api_chat = get_headers_api_chat(BASE_URL)
190
- headers_chat = get_headers_chat(
191
- BASE_URL,
192
- next_action=str(uuid.uuid4()),
193
- next_router_state_tree=json.dumps([""])
194
- )
195
 
196
  if request.model == 'o1-preview':
197
  delay_seconds = random.randint(20, 60)
198
- logger.info(f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview'")
199
  await asyncio.sleep(delay_seconds)
200
 
201
  # Fetch the h-value for the 'validated' field
@@ -204,33 +179,52 @@ async def process_non_streaming_response(request: ChatRequest):
204
  logger.error("Failed to retrieve h-value for validation.")
205
  raise HTTPException(status_code=500, detail="Validation failed due to missing h-value.")
206
 
207
- json_data = build_json_data(request, h_value, model_prefix)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
208
 
209
  full_response = ""
210
  async with httpx.AsyncClient() as client:
211
  try:
212
  async with client.stream(
213
- method="POST",
214
- url=f"{BASE_URL}/api/chat",
215
- headers=headers_api_chat,
216
- json=json_data,
217
  ) as response:
218
  response.raise_for_status()
219
  async for chunk in response.aiter_text():
220
  full_response += chunk
221
  except httpx.HTTPStatusError as e:
222
- logger.error(f"HTTP error occurred: {e}")
223
  raise HTTPException(status_code=e.response.status_code, detail=str(e))
224
  except httpx.RequestError as e:
225
- logger.error(f"Error occurred during request: {e}")
226
  raise HTTPException(status_code=500, detail=str(e))
227
-
228
  if full_response.startswith("$@$v=undefined-rv1$@$"):
229
  full_response = full_response[21:]
230
 
231
  # Remove the blocked message if present
232
  if BLOCKED_MESSAGE in full_response:
233
- logger.info("Blocked message detected in response.")
234
  full_response = full_response.replace(BLOCKED_MESSAGE, '').strip()
235
  if not full_response:
236
  raise HTTPException(status_code=500, detail="Blocked message detected in response.")
@@ -250,4 +244,4 @@ async def process_non_streaming_response(request: ChatRequest):
250
  }
251
  ],
252
  "usage": None,
253
- }
 
3
  import uuid
4
  import asyncio
5
  import random
6
+ import string
7
  from typing import Any, Dict, Optional
8
 
9
  import httpx
 
16
  AGENT_MODE,
17
  TRENDING_AGENT_MODE,
18
  MODEL_PREFIXES,
19
+ MODEL_REFERERS
20
  )
21
  from api.models import ChatRequest
22
  from api.logger import setup_logger
 
72
  return content[len(model_prefix):].strip()
73
  return content
74
 
75
+ # Process streaming response with headers from config.py
76
+ async def process_streaming_response(request: ChatRequest):
77
+ # Generate a unique ID for this request
78
+ request_id = f"chatcmpl-{uuid.uuid4()}"
79
+ logger.info(f"Processing request with ID: {request_id} - Model: {request.model}")
80
+
81
  agent_mode = AGENT_MODE.get(request.model, {})
82
  trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
83
+ model_prefix = MODEL_PREFIXES.get(request.model, "")
84
+
85
+ # Adjust headers_api_chat since referer_url is removed
86
+ headers_api_chat = get_headers_api_chat(BASE_URL)
87
+
88
+ if request.model == 'o1-preview':
89
+ delay_seconds = random.randint(1, 60)
90
+ logger.info(f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview' (Request ID: {request_id})")
91
+ await asyncio.sleep(delay_seconds)
92
+
93
+ # Fetch the h-value for the 'validated' field
94
+ h_value = await getHid()
95
+ if not h_value:
96
+ logger.error("Failed to retrieve h-value for validation.")
97
+ raise HTTPException(status_code=500, detail="Validation failed due to missing h-value.")
98
+
99
+ json_data = {
100
  "agentMode": agent_mode,
101
  "clickedAnswer2": False,
102
  "clickedAnswer3": False,
103
  "clickedForceWebSearch": False,
104
  "codeModelMode": True,
105
  "githubToken": None,
106
+ "id": None, # Using request_id instead of chat_id
107
  "isChromeExt": False,
108
  "isMicMode": False,
109
  "maxTokens": request.max_tokens,
 
116
  "userId": None,
117
  "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
118
  "userSystemPrompt": None,
119
+ "validated": h_value, # Dynamically set the validated field
120
  "visitFromDelta": False,
121
  }
122
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
123
  async with httpx.AsyncClient() as client:
124
  try:
125
  async with client.stream(
 
131
  ) as response:
132
  response.raise_for_status()
133
  async for chunk in response.aiter_text():
134
+ timestamp = int(datetime.now().timestamp())
135
  if chunk:
136
+ content = chunk
137
+ if content.startswith("$@$v=undefined-rv1$@$"):
138
+ content = content[21:]
139
+ # Remove the blocked message if present
140
+ if BLOCKED_MESSAGE in content:
141
+ logger.info(f"Blocked message detected in response for Request ID {request_id}.")
142
+ content = content.replace(BLOCKED_MESSAGE, '').strip()
143
+ if not content:
144
+ continue # Skip if content is empty after removal
145
+ cleaned_content = strip_model_prefix(content, model_prefix)
 
 
 
146
  yield f"data: {json.dumps(create_chat_completion_data(cleaned_content, request.model, timestamp))}\n\n"
147
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
148
  yield f"data: {json.dumps(create_chat_completion_data('', request.model, timestamp, 'stop'))}\n\n"
149
  yield "data: [DONE]\n\n"
150
  except httpx.HTTPStatusError as e:
151
+ logger.error(f"HTTP error occurred for Request ID {request_id}: {e}")
152
  raise HTTPException(status_code=e.response.status_code, detail=str(e))
153
  except httpx.RequestError as e:
154
+ logger.error(f"Error occurred during request for Request ID {request_id}: {e}")
155
  raise HTTPException(status_code=500, detail=str(e))
156
 
157
  # Process non-streaming response with headers from config.py
158
  async def process_non_streaming_response(request: ChatRequest):
159
+ # Generate a unique ID for this request
160
+ request_id = f"chatcmpl-{uuid.uuid4()}"
161
+ logger.info(f"Processing request with ID: {request_id} - Model: {request.model}")
162
 
163
+ agent_mode = AGENT_MODE.get(request.model, {})
164
+ trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
165
  model_prefix = MODEL_PREFIXES.get(request.model, "")
166
 
167
  # Adjust headers_api_chat and headers_chat since referer_url is removed
168
  headers_api_chat = get_headers_api_chat(BASE_URL)
169
+ headers_chat = get_headers_chat(BASE_URL, next_action=str(uuid.uuid4()), next_router_state_tree=json.dumps([""]))
 
 
 
 
170
 
171
  if request.model == 'o1-preview':
172
  delay_seconds = random.randint(20, 60)
173
+ logger.info(f"Introducing a delay of {delay_seconds} seconds for model 'o1-preview' (Request ID: {request_id})")
174
  await asyncio.sleep(delay_seconds)
175
 
176
  # Fetch the h-value for the 'validated' field
 
179
  logger.error("Failed to retrieve h-value for validation.")
180
  raise HTTPException(status_code=500, detail="Validation failed due to missing h-value.")
181
 
182
+ json_data = {
183
+ "agentMode": agent_mode,
184
+ "clickedAnswer2": False,
185
+ "clickedAnswer3": False,
186
+ "clickedForceWebSearch": False,
187
+ "codeModelMode": True,
188
+ "githubToken": None,
189
+ "id": None, # Using request_id instead of chat_id
190
+ "isChromeExt": False,
191
+ "isMicMode": False,
192
+ "maxTokens": request.max_tokens,
193
+ "messages": [message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages],
194
+ "mobileClient": False,
195
+ "playgroundTemperature": request.temperature,
196
+ "playgroundTopP": request.top_p,
197
+ "previewToken": None,
198
+ "trendingAgentMode": trending_agent_mode,
199
+ "userId": None,
200
+ "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
201
+ "userSystemPrompt": None,
202
+ "validated": h_value, # Dynamically set the validated field
203
+ "visitFromDelta": False,
204
+ }
205
 
206
  full_response = ""
207
  async with httpx.AsyncClient() as client:
208
  try:
209
  async with client.stream(
210
+ method="POST", url=f"{BASE_URL}/api/chat", headers=headers_api_chat, json=json_data
 
 
 
211
  ) as response:
212
  response.raise_for_status()
213
  async for chunk in response.aiter_text():
214
  full_response += chunk
215
  except httpx.HTTPStatusError as e:
216
+ logger.error(f"HTTP error occurred for Request ID {request_id}: {e}")
217
  raise HTTPException(status_code=e.response.status_code, detail=str(e))
218
  except httpx.RequestError as e:
219
+ logger.error(f"Error occurred during request for Request ID {request_id}: {e}")
220
  raise HTTPException(status_code=500, detail=str(e))
221
+
222
  if full_response.startswith("$@$v=undefined-rv1$@$"):
223
  full_response = full_response[21:]
224
 
225
  # Remove the blocked message if present
226
  if BLOCKED_MESSAGE in full_response:
227
+ logger.info(f"Blocked message detected in response for Request ID {request_id}.")
228
  full_response = full_response.replace(BLOCKED_MESSAGE, '').strip()
229
  if not full_response:
230
  raise HTTPException(status_code=500, detail="Blocked message detected in response.")
 
244
  }
245
  ],
246
  "usage": None,
247
+ }