bibibi12345 committed
Commit aa73bf2 · 1 Parent(s): 6e694c5
Files changed (2):
  1. app/api_helpers.py +1 -1
  2. app/openai_handler.py +2 -2
app/api_helpers.py CHANGED
@@ -365,7 +365,7 @@ async def openai_fake_stream_generator( # Reverted signature: removed thought_ta
 
     # Use the already configured extra_body which includes the thought_tag_marker
     _api_call_task = asyncio.create_task(
-        openai_client.chat.completions.create(**params_for_non_stream_call, extra_body=openai_extra_body['extra_body'])
+        openai_client.chat.completions.create(**params_for_non_stream_call, extra_body=openai_extra_body)
     )
     raw_response = await _api_call_task
     full_content_from_api = ""
app/openai_handler.py CHANGED
@@ -115,7 +115,7 @@ class OpenAIDirectHandler:
     openai_params_for_stream = {**openai_params, "stream": True}
     stream_response = await openai_client.chat.completions.create(
         **openai_params_for_stream,
-        extra_body=openai_extra_body['extra_body']
+        extra_body=openai_extra_body
     )
 
     # Create processor for tag-based extraction across chunks
@@ -199,7 +199,7 @@ class OpenAIDirectHandler:
     openai_params_non_stream = {**openai_params, "stream": False}
     response = await openai_client.chat.completions.create(
         **openai_params_non_stream,
-        extra_body=openai_extra_body['extra_body']
+        extra_body=openai_extra_body
     )
     response_dict = response.model_dump(exclude_unset=True, exclude_none=True)
 
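All three hunks make the same fix: the value passed to the OpenAI SDK's extra_body kwarg should be the mapping of extra request fields itself, which the SDK merges into the outgoing JSON body, not that mapping indexed one level deeper. A minimal sketch of the shape difference, assuming openai_extra_body is built elsewhere as a flat dict (its actual construction is not shown in this diff, and the stand-in below is not the project's code):

# Hypothetical value; the real contents of openai_extra_body are not in this diff.
openai_extra_body = {"google": {"thought_tag_marker": "think"}}

def fake_create(**kwargs):
    # Stand-in for openai_client.chat.completions.create: the real SDK merges
    # the mapping passed as `extra_body` into the JSON request body.
    return {"model": "example-model", **(kwargs.get("extra_body") or {})}

# After this commit: the flat dict is passed through and its keys land in the body.
print(fake_create(extra_body=openai_extra_body))

# Before this commit: openai_extra_body['extra_body'] only works if the variable
# carries an extra wrapper, i.e. {"extra_body": {...}}; with the flat shape above
# it raises KeyError.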