theblackcat102 committed
Commit dba81a7 · verified · 1 Parent(s): bb84be9

Add optimus alpha

Files changed (1): README.md (+11, -7)
README.md CHANGED
@@ -2257,28 +2257,29 @@ The TMMLU+ dataset is six times larger and contains more balanced subjects compa
  | Model | humanities | social sciences | STEM | Others | Average |
  | ----- | ---: | ---: | ---: | ---: | ---: |
  | openrouter/quasar-alpha | 70.38 | 83.07 | 87.89 | 76.42 | 79.44 |
- | deepseek-ai/deepseek-v3 | 73.19 | 81.93 | 82.93 | 74.41 | 78.11 |
+ | deepseek-chat | 73.19 | 81.93 | 82.93 | 74.41 | 78.11 |
+ | openrouter/optimus-alpha | 67.73 | 78.68 | 84.14 | 74.73 | 76.32 |
  | Qwen/Qwen2.5-72B-Instruct-Turbo | 67.59 | 79.36 | 82.57 | 72.65 | 75.54 |
  | gpt-4o-2024-08-06 | 65.48 | 78.23 | 81.39 | 71.24 | 74.08 |
  | claude-3-5-sonnet-20240620 | 73.23 | 78.27 | 68.50 | 69.35 | 72.34 |
  | gemini-2.0-flash-001 | 67.81 | 75.24 | 74.79 | 65.92 | 70.94 |
  | gemini-2.0-flash-lite-001 | 65.26 | 75.12 | 72.73 | 65.50 | 69.65 |
- | qwen/qwen2.5-vl-32b-instruct | 62.93 | 75.91 | 68.39 | 65.16 | 68.10 |
  | gemini-2.0-flash-lite-preview-02-05 | 64.66 | 73.48 | 70.00 | 63.90 | 68.01 |
+ | qwen/qwen2.5-vl-32b-instruct | 66.40 | 71.09 | 68.39 | 64.98 | 67.71 |
  | Qwen/QwQ-32B-Preview | 57.98 | 70.94 | 72.87 | 63.59 | 66.35 |
- | meta-llama/llama-3.2-90b-vision-instruct | 61.70 | 72.33 | 67.19 | 61.51 | 65.68 |
  | claude-3-opus-20240229 | 60.34 | 70.12 | 67.43 | 62.32 | 65.05 |
  | gemini-1.5-pro | 61.84 | 70.29 | 66.18 | 60.30 | 64.65 |
  | gpt-4o-mini-2024-07-18 | 55.01 | 67.09 | 73.16 | 61.36 | 64.15 |
  | mistralai/Mistral-Small-24B-Instruct-2501 | 54.56 | 68.32 | 73.25 | 59.25 | 63.85 |
- | meta-llama/llama-3.1-70b | 64.94 | 70.14 | 58.63 | 61.33 | 63.76 |
+ | meta-llama/llama-3.2-90b-vision-instruct | 59.00 | 68.11 | 66.60 | 61.36 | 63.76 |
+ | llama-3.1-70b-versatile | 64.94 | 70.14 | 58.63 | 61.33 | 63.76 |
  | Qwen/Qwen2.5-7B-Instruct-Turbo | 54.42 | 64.51 | 68.01 | 58.83 | 61.44 |
  | yentinglin/Llama-3-Taiwan-8B-Instruct | 61.51 | 67.61 | 52.05 | 58.60 | 59.94 |
- | meta-llama/llama-4-scout | 53.86 | 62.02 | 60.76 | 58.06 | 58.68 |
- | google/gemma-3-27b-it | 55.57 | 58.32 | 55.65 | 49.74 | 54.82 |
+ | meta-llama/llama-4-scout | 51.00 | 59.91 | 62.53 | 56.36 | 57.45 |
+ | google/gemma-3-27b-it | 52.54 | 57.69 | 58.59 | 50.50 | 54.83 |
  | claude-3-sonnet-20240229 | 52.06 | 59.38 | 49.87 | 51.64 | 53.24 |
  | Qwen2-7B-Instruct | 55.66 | 66.40 | 27.18 | 55.32 | 51.14 |
- | meta-llama/llama-4-maverick | 44.67 | 50.96 | 55.19 | 51.34 | 50.54 |
+ | meta-llama/llama-4-maverick | 45.05 | 51.06 | 55.37 | 52.53 | 51.00 |
  | gemma2-9b-it | 45.38 | 55.76 | 49.89 | 48.92 | 49.99 |
  | claude-3-haiku-20240307 | 47.48 | 54.48 | 48.47 | 48.77 | 49.80 |
  | gemini-1.5-flash | 42.99 | 53.42 | 53.47 | 46.56 | 49.11 |
@@ -2293,6 +2294,9 @@ The TMMLU+ dataset is six times larger and contains more balanced subjects compa
  | reka-edge | 31.84 | 39.40 | 30.02 | 32.36 | 33.41 |
  | meta-llama/Llama-3-8b-chat-hf | 28.91 | 34.19 | 31.52 | 31.79 | 31.60 |
  | taide/Llama3-TAIDE-LX-8B-Chat-Alpha1 | 27.02 | 36.64 | 25.33 | 27.96 | 29.24 |
+ | o1-preview-2024-09-12 | 0.00 | 0.00 | 81.55 | 0.00 | 20.39 |
+ | claude-instant-1 | 0.00 | 0.00 | 25.24 | 20.30 | 11.39 |
+ | Llama-3.1-8B-Instruct | 0.00 | 0.00 | 15.53 | 0.00 | 3.88 |
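For reference, the Average column in the added rows appears consistent with a simple unweighted mean of the four category scores, e.g. o1-preview-2024-09-12: 81.55 / 4 ≈ 20.39 and openrouter/optimus-alpha: (67.73 + 78.68 + 84.14 + 74.73) / 4 = 76.32. A minimal sketch of that arithmetic, assuming equal weighting across the four groupings (not confirmed by this commit; the row values below are copied from the table above):

```python
# Recompute the Average column as an unweighted mean of the four category
# scores (humanities, social sciences, STEM, Others). The equal weighting is
# an assumption inferred from the added rows, not stated in the diff itself.
rows = {
    "openrouter/optimus-alpha": (67.73, 78.68, 84.14, 74.73),
    "o1-preview-2024-09-12": (0.00, 0.00, 81.55, 0.00),
}

for model, scores in rows.items():
    average = sum(scores) / len(scores)
    print(f"{model}: {average:.2f}")  # prints 76.32 and 20.39, matching the table
```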