Update app.py
app.py CHANGED
@@ -52,9 +52,9 @@ MODEL_CONFIGS = {
     "gpt-4.1": {"max_tokens": 33000, "param_name": "max_tokens", "api_version": None, "category": "OpenAI", "warning": None},
     "gpt-4.1-mini": {"max_tokens": 33000, "param_name": "max_tokens", "api_version": None, "category": "OpenAI", "warning": None},
     "gpt-4.1-nano": {"max_tokens": 33000, "param_name": "max_tokens", "api_version": None, "category": "OpenAI", "warning": None},
-    "o3-mini": {"
-    "o1": {"
-    "o1-mini": {"
+    "o3-mini": {"max_completion_tokens": 100000, "param_name": "max_completion_tokens", "api_version": "2024-12-01-preview", "category": "OpenAI", "warning": None},
+    "o1": {"max_completion_tokens": 100000, "param_name": "max_completion_tokens", "api_version": "2024-12-01-preview", "category": "OpenAI", "warning": None},
+    "o1-mini": {"max_completion_tokens": 66000, "param_name": "max_completion_tokens", "api_version": "2024-12-01-preview", "category": "OpenAI", "warning": None},
     "o1-preview": {"max_tokens": 33000, "param_name": "max_tokens", "api_version": None, "category": "OpenAI", "warning": None},
     "Phi-4-multimodal-instruct": {"max_tokens": 4000, "param_name": "max_tokens", "api_version": None, "category": "Microsoft", "warning": None},
     "Mistral-large-2407": {"max_tokens": 4000, "param_name": "max_tokens", "api_version": None, "category": "Mistral", "warning": None},
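
For context, the change switches the o-series entries (o3-mini, o1, o1-mini) from max_tokens to max_completion_tokens and pins the 2024-12-01-preview API version. Below is a minimal sketch of how such config entries could drive a request; the helper names (build_request_kwargs, pick_api_version) and the default API version are hypothetical, since the Space's actual dispatch code is not shown in this diff.

    # Hedged sketch, not the Space's actual code: one plausible way the updated
    # MODEL_CONFIGS entries could be consumed when building a completion request.
    MODEL_CONFIGS = {
        "gpt-4.1": {"max_tokens": 33000, "param_name": "max_tokens",
                    "api_version": None, "category": "OpenAI", "warning": None},
        "o3-mini": {"max_completion_tokens": 100000, "param_name": "max_completion_tokens",
                    "api_version": "2024-12-01-preview", "category": "OpenAI", "warning": None},
    }

    def build_request_kwargs(model_name: str) -> dict:
        """Return keyword arguments using the per-model token-limit parameter name
        (max_tokens for most models, max_completion_tokens for the o-series)."""
        cfg = MODEL_CONFIGS[model_name]
        param = cfg["param_name"]            # e.g. "max_completion_tokens" for o3-mini
        return {"model": model_name, param: cfg[param]}

    def pick_api_version(model_name: str, default: str = "2024-02-01") -> str:
        """o-series entries pin '2024-12-01-preview'; other models fall back to a
        default (the default used here is a placeholder assumption)."""
        return MODEL_CONFIGS[model_name]["api_version"] or default

    # Example:
    # build_request_kwargs("o3-mini") -> {"model": "o3-mini", "max_completion_tokens": 100000}
    # pick_api_version("gpt-4.1")     -> "2024-02-01"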