Spaces:
Runtime error
Runtime error
Remove the explicit temperature parameter so the model's default temperature is used instead
Browse files- app.py +1 -2
- context_window.json +2 -0
app.py
CHANGED
@@ -254,8 +254,7 @@ def chat_with_models(
|
|
254 |
try:
|
255 |
request_params = {
|
256 |
"model": model_name,
|
257 |
-
"messages": truncated_input
|
258 |
-
"temperature": 0,
|
259 |
}
|
260 |
response = openai_client.chat.completions.create(**request_params)
|
261 |
model_response["content"] = response.choices[0].message.content
|
|
|
254 |
try:
|
255 |
request_params = {
|
256 |
"model": model_name,
|
257 |
+
"messages": truncated_input
|
|
|
258 |
}
|
259 |
response = openai_client.chat.completions.create(**request_params)
|
260 |
model_response["content"] = response.choices[0].message.content
|
context_window.json
CHANGED
@@ -15,6 +15,8 @@
|
|
15 |
"llama-3.1-70b": 128000,
|
16 |
"llama-3.3-70b": 128000,
|
17 |
"o1": 128000,
|
|
|
|
|
18 |
"Qwen2.5-32B-Instruct": 131072,
|
19 |
"qwen2.5-72b": 32768,
|
20 |
"Qwen2.5-72B-Instruct": 131072,
|
|
|
15 |
"llama-3.1-70b": 128000,
|
16 |
"llama-3.3-70b": 128000,
|
17 |
"o1": 128000,
|
18 |
+
"o1-mini": 128000,
|
19 |
+
"o3-mini": 200000,
|
20 |
"Qwen2.5-32B-Instruct": 131072,
|
21 |
"qwen2.5-72b": 32768,
|
22 |
"Qwen2.5-72B-Instruct": 131072,
|