hzruo committed
Commit f744763 · verified
1 Parent(s): 4385e66

Update main.py

Files changed (1):
  main.py +64 -48
main.py CHANGED
@@ -172,6 +172,60 @@ async def chat_completions(
             stream=True
         )
 
+    async def process_image_gen(job_id, prompt, negative):
+        """Async helper that handles image generation."""
+        print(f"Starting image generation process for job_id: {job_id}")
+
+        # Immediately prepare the "thinking started" message
+        start_time = time.time()
+        think_msg = "<think>\n"
+        think_msg += "🎨 Generating image...\n\n"
+        think_msg += f"Prompt: {prompt}\n"
+
+        # Build the thinking-start chunk
+        chunk1 = {
+            "id": f"chatcmpl-{chat_id}",
+            "object": "chat.completion.chunk",
+            "created": int(time.time()),
+            "model": data.get('model'),
+            "choices": [{
+                "delta": {"content": think_msg},
+                "index": 0,
+                "finish_reason": None
+            }]
+        }
+
+        # Check image status and upload
+        image_base64 = await check_image_status(session, job_id, headers)
+        if image_base64:
+            result = await upload_to_xinyew(image_base64, job_id)
+        else:
+            result = None
+
+        # Compose the end message
+        elapsed_time = time.time() - start_time
+        end_msg = f"\n🤔 Thinking for {elapsed_time:.1f}s...\n"
+        end_msg += "</think>\n\n"
+        if result:
+            end_msg += f"![Generated Image]({result})"
+        else:
+            end_msg += "*Image generation or upload failed.*\n"
+
+        # Build the end-message chunk
+        chunk2 = {
+            "id": f"chatcmpl-{chat_id}",
+            "object": "chat.completion.chunk",
+            "created": int(time.time()),
+            "model": data.get('model'),
+            "choices": [{
+                "delta": {"content": end_msg},
+                "index": 0,
+                "finish_reason": None
+            }]
+        }
+
+        return [chunk1, chunk2]
+
     def generate():
         content_buffer = ""
         for line in response.iter_lines():
@@ -194,56 +248,18 @@ async def chat_completions(
                 match = re.search(r"jobId='([^']+)' prompt='([^']+)' negative='([^']*)'", msg_data)
                 if match:
                     job_id, prompt, negative = match.groups()
-                    print(f"Starting image generation process for job_id: {job_id}")
-
-                    # Immediately send the "thinking started" message
-                    start_time = time.time()
-                    think_msg = "<think>\n"
-                    think_msg += "🎨 Generating image...\n\n"
-                    think_msg += f"Prompt: {prompt}\n"
-
-                    # Send the thinking-start message (standard OpenAI format)
-                    chunk = {
-                        "id": f"chatcmpl-{chat_id}",
-                        "object": "chat.completion.chunk",
-                        "created": int(time.time()),
-                        "model": data.get('model'),
-                        "choices": [{
-                            "delta": {"content": think_msg},
-                            "index": 0,
-                            "finish_reason": None
-                        }]
-                    }
-                    yield f"data: {json.dumps(chunk)}\n\n"
-
-                    # Check image status and upload
-                    result = await upload_to_xinyew(
-                        await check_image_status(session, job_id, headers),
-                        job_id
-                    )
-
-                    # Compose the end message
-                    elapsed_time = time.time() - start_time
-                    end_msg = f"\n🤔 Thinking for {elapsed_time:.1f}s...\n"
-                    end_msg += "</think>\n\n"
-                    if result:
-                        end_msg += f"![Generated Image]({result})"
-                    else:
-                        end_msg += "*Image generation or upload failed.*\n"
-
-                    # Send the end message (standard OpenAI format)
-                    chunk = {
-                        "id": f"chatcmpl-{chat_id}",
-                        "object": "chat.completion.chunk",
-                        "created": int(time.time()),
-                        "model": data.get('model'),
-                        "choices": [{
-                            "delta": {"content": end_msg},
-                            "index": 0,
-                            "finish_reason": None
-                        }]
-                    }
-                    yield f"data: {json.dumps(chunk)}\n\n"
+
+                    # Create a new event loop for the async helper
+                    loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(loop)
+                    try:
+                        chunks = loop.run_until_complete(process_image_gen(job_id, prompt, negative))
+                    finally:
+                        loop.close()
+
+                    # Send all message chunks
+                    for chunk in chunks:
+                        yield f"data: {json.dumps(chunk)}\n\n"
                     continue
 
                 content_buffer += msg_data
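
The structure this change introduces — an async helper that returns pre-built chat.completion.chunk payloads, driven from a synchronous SSE generator through a dedicated event loop (a plain def generator cannot await) — can be exercised in isolation. Below is a minimal sketch of that pattern, not the repository's code: fetch_image_url stands in for the real check_image_status/upload_to_xinyew calls, and chat_id/model are passed explicitly instead of being closed over from the request handler.

import asyncio
import json
import time


async def fetch_image_url(job_id):
    """Stand-in for the check_image_status + upload_to_xinyew pipeline."""
    await asyncio.sleep(0.1)  # simulate polling and upload latency
    return f"https://example.com/{job_id}.png"


def sse_chunk(chat_id, model, content):
    """Build one OpenAI-style chat.completion.chunk payload."""
    return {
        "id": f"chatcmpl-{chat_id}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [{"delta": {"content": content}, "index": 0, "finish_reason": None}],
    }


def generate(chat_id, model, job_id):
    """Synchronous SSE generator that drives an async helper on its own loop."""
    head = sse_chunk(chat_id, model, "<think>\n🎨 Generating image...\n")
    yield f"data: {json.dumps(head)}\n\n"

    # A plain `def` generator cannot `await`, so run the coroutine to
    # completion on a throwaway event loop, mirroring the diff above.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        url = loop.run_until_complete(fetch_image_url(job_id))
    finally:
        loop.close()

    if url:
        tail = f"</think>\n\n![Generated Image]({url})"
    else:
        tail = "</think>\n\n*Image generation or upload failed.*"
    yield f"data: {json.dumps(sse_chunk(chat_id, model, tail))}\n\n"


if __name__ == "__main__":
    for event in generate("demo", "demo-model", "job-123"):
        print(event, end="")

Creating and closing a fresh loop per request keeps the helper independent of whatever loop the serving framework runs; when generate() executes on a worker thread, asyncio.run(process_image_gen(...)) would presumably behave the same way.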