Commit · 9a294b5
Parent(s): 7ccf1c1

fixed model name bug

Files changed:
- app/routes/chat_api.py +1 -1
- docker-compose.yml +2 -3
app/routes/chat_api.py CHANGED

@@ -270,7 +270,7 @@ async def chat_completions(fastapi_request: Request, request: OpenAIRequest, api
         # This means if `request.model` was "gemini-1.5-pro-search", `base_model_name` becomes "gemini-1.5-pro"
         # but the API call might need the full "gemini-1.5-pro-search".
         # Let's use `request.model` for the API call here, and `base_model_name` for checks like Express eligibility.
-        return await execute_gemini_call(client_to_use,
+        return await execute_gemini_call(client_to_use, base_model_name, current_prompt_func, generation_config, request)

     except Exception as e:
         error_msg = f"Unexpected error in chat_completions endpoint: {str(e)}"
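The hunk above acts on the distinction its comments describe: request.model carries the full requested name (e.g. "gemini-1.5-pro-search"), while base_model_name is the stripped form used for checks such as Express eligibility. A minimal sketch of that suffix handling, written as an assumption based on the comments rather than on the repository's actual helper:

# Illustrative sketch only: the repository's real parsing code is not part of this diff.
KNOWN_SUFFIXES = ("-search",)  # assumed suffix convention, taken from the comments above

def to_base_model_name(requested_model: str) -> str:
    """Strip known feature suffixes, e.g. 'gemini-1.5-pro-search' -> 'gemini-1.5-pro'."""
    for suffix in KNOWN_SUFFIXES:
        if requested_model.endswith(suffix):
            return requested_model[: -len(suffix)]
    return requested_model

# base_model_name drives eligibility checks (e.g. Express mode); the original
# request.model may still be needed when the upstream call expects the full name.
assert to_base_model_name("gemini-1.5-pro-search") == "gemini-1.5-pro"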
docker-compose.yml CHANGED

@@ -2,9 +2,8 @@ version: '3.8'
 
 services:
   openai-to-gemini:
-
-
-      dockerfile: Dockerfile
+    image: gzzhongqi/vertex2api:latest
+    container_name: vertex2api
     ports:
       # Map host port 8050 to container port 7860 (for Hugging Face compatibility)
       - "8050:7860"
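After this change the service runs the published gzzhongqi/vertex2api:latest image instead of building locally, with host port 8050 still mapped to container port 7860. A short sketch of exercising the mapped port with an OpenAI-style request; the /v1/chat/completions path, payload shape, and bearer-token header are assumptions about the proxy's interface, not something this diff confirms:

# Illustrative only: endpoint path, payload, and auth header are assumed, based on
# the service acting as an OpenAI-compatible front end for Gemini.
import requests

resp = requests.post(
    "http://localhost:8050/v1/chat/completions",  # host port 8050 maps to container port 7860
    headers={"Authorization": "Bearer YOUR_API_KEY"},  # placeholder credential
    json={
        "model": "gemini-1.5-pro",
        "messages": [{"role": "user", "content": "Hello"}],
    },
    timeout=30,
)
print(resp.status_code, resp.json())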