Niansuh committed
Commit 5b4d662 · verified · 1 Parent(s): 132ad0c

Update main.py

Files changed (1)
  1. main.py +13 -12
main.py CHANGED
@@ -173,13 +173,6 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
                 "webSearchMode": False,
             }
 
-            if model in cls.agentMode:
-                data["agentMode"] = cls.agentMode[model]
-            elif model in cls.trendingAgentMode:
-                data["trendingAgentMode"] = cls.trendingAgentMode[model]
-            elif model in cls.userSelectedModel:
-                data["userSelectedModel"] = cls.userSelectedModel[model]
-
             async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
                 response.raise_for_status()
                 if model == 'ImageGenerationLV45LJp':
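
Review note: this hunk removes the branch that attached agentMode / trendingAgentMode / userSelectedModel entries to the request payload, so every model is now posted with the bare payload. For reference, a minimal self-contained sketch of the dict-membership dispatch being dropped (the table contents below are made-up placeholders, not the class's real mappings):

import json

# Placeholder tables standing in for cls.agentMode / cls.trendingAgentMode / cls.userSelectedModel.
agentMode = {"ImageGenerationLV45LJp": {"mode": True, "id": "ImageGenerationLV45LJp"}}
trendingAgentMode = {"blackboxai": {"mode": True, "id": "blackboxai"}}
userSelectedModel = {"gpt-4o": "gpt-4o"}

def build_payload(model: str) -> dict:
    data = {"webSearchMode": False}
    # At most one of the three keys is attached, depending on which table knows the model.
    if model in agentMode:
        data["agentMode"] = agentMode[model]
    elif model in trendingAgentMode:
        data["trendingAgentMode"] = trendingAgentMode[model]
    elif model in userSelectedModel:
        data["userSelectedModel"] = userSelectedModel[model]
    return data

print(json.dumps(build_payload("gpt-4o")))   # {"webSearchMode": false, "userSelectedModel": "gpt-4o"}
print(json.dumps(build_payload("unknown")))  # {"webSearchMode": false}  <- what every model gets after this commit
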
@@ -191,12 +184,19 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
                     else:
                         raise Exception("Image URL not found in the response")
                 else:
+                    response_content = ""
                     async for chunk in response.content.iter_any():
                         if chunk:
-                            decoded_chunk = chunk.decode(errors='ignore') # Handle decoding errors
+                            decoded_chunk = chunk.decode(errors='ignore')
                             decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
                             if decoded_chunk.strip():
-                                yield decoded_chunk
+                                response_content += decoded_chunk
+
+                    # Check if the response content is empty
+                    if not response_content.strip():
+                        raise ModelNotWorkingException(model)
+
+                    yield response_content
 
 # FastAPI app setup
 app = FastAPI()
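
Review note: this is the substantive change in the commit. The text branch no longer yields each decoded chunk as it arrives; it accumulates the cleaned chunks, raises ModelNotWorkingException if nothing usable came back, and yields the whole response once. A minimal self-contained sketch of that accumulate-then-yield pattern (the exception class and the fake chunk source below are stand-ins for the project's real ones):

import asyncio
import re

class ModelNotWorkingException(Exception):
    # Stand-in for the project's exception; raised when a model returns nothing usable.
    def __init__(self, model: str):
        super().__init__(f"The model '{model}' is currently not working.")

async def fake_chunks():
    # Stand-in for response.content.iter_any(): raw bytes, possibly carrying version markers.
    for part in [b"$@$v=1.2$@$", b"Hello, ", b"world!"]:
        yield part

async def create_text_response(model: str = "demo-model"):
    response_content = ""
    async for chunk in fake_chunks():
        if chunk:
            decoded_chunk = chunk.decode(errors='ignore')
            decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)  # strip version markers
            if decoded_chunk.strip():
                response_content += decoded_chunk
    # Same guard as the hunk above: an empty reply becomes an exception, not silence.
    if not response_content.strip():
        raise ModelNotWorkingException(model)
    yield response_content  # one complete payload instead of many partial chunks

async def main():
    async for text in create_text_response():
        print(text)  # -> Hello, world!

asyncio.run(main())

One consequence worth flagging in review: the provider no longer emits partial output while the upstream is still generating, so even the SSE path further down effectively streams a single large chunk.
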
@@ -240,7 +240,8 @@ async def chat_completions(request: ChatRequest):
             model=request.model,
             messages=messages,
             image=None, # Pass the image if required
-            image_name=None # Pass image name if required
+            image_name=None
+            # Pass image name if required
         )
     except ModelNotWorkingException as e:
         raise HTTPException(status_code=503, detail=str(e))
@@ -249,7 +250,6 @@ async def chat_completions(request: ChatRequest):
     async def generate():
         async for chunk in async_generator:
             if isinstance(chunk, ImageResponse):
-                # Format the response as a Markdown image
                 image_markdown = f"![image]({chunk.url})"
                 yield f"data: {json.dumps(create_response(image_markdown, request.model))}\n\n"
             else:
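
Review note: this hunk only drops a comment, but it sits in the SSE streaming branch, which wraps each chunk with create_response and frames it as a "data:" record. A rough sketch of that framing, assuming an OpenAI-style chat.completion.chunk payload (the real create_response in main.py is not shown in this diff and may use different fields):

import json
import time
import uuid

def create_response(content: str, model: str) -> dict:
    # Assumed shape of the streaming payload; only the name create_response is taken from the diff.
    return {
        "id": f"chatcmpl-{uuid.uuid4()}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [{"index": 0, "delta": {"content": content}, "finish_reason": None}],
    }

def sse_line(payload: dict) -> str:
    # Server-Sent Events framing used by the streaming branch: "data: <json>" ending in a blank line.
    return f"data: {json.dumps(payload)}\n\n"

# One image event, framed like the yield line in the hunk above.
print(sse_line(create_response("![image](https://example.com/img.png)", "ImageGenerationLV45LJp")), end="")
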
@@ -263,7 +263,7 @@ async def chat_completions(request: ChatRequest):
             if isinstance(chunk, ImageResponse):
                 response_content += f"![image]({chunk.url})\n"
             else:
-                response_content += chunk # Concatenate text responses
+                response_content += chunk
 
         return {
             "id": f"chatcmpl-{uuid.uuid4()}",
@@ -286,3 +286,4 @@ async def chat_completions(request: ChatRequest):
 @app.get("/niansuhai/v1/models")
 async def get_models():
     return {"models": Blackbox.models}
+
 
 