Update README.md
README.md CHANGED
@@ -2257,28 +2257,28 @@ The TMMLU+ dataset is six times larger and contains more balanced subjects compa
 | Model | humanities | social sciences | STEM | Others | Average |
 | ----- | ---: | ---: | ---: | ---: | ---: |
 | openrouter/quasar-alpha | 70.38 | 83.07 | 87.89 | 76.42 | 79.44 |
-| deepseek-v3 | 73.19 | 81.93 | 82.93 | 74.41 | 78.11 |
+| deepseek-ai/deepseek-v3 | 73.19 | 81.93 | 82.93 | 74.41 | 78.11 |
 | Qwen/Qwen2.5-72B-Instruct-Turbo | 67.59 | 79.36 | 82.57 | 72.65 | 75.54 |
 | gpt-4o-2024-08-06 | 65.48 | 78.23 | 81.39 | 71.24 | 74.08 |
 | claude-3-5-sonnet-20240620 | 73.23 | 78.27 | 68.50 | 69.35 | 72.34 |
 | gemini-2.0-flash-001 | 67.81 | 75.24 | 74.79 | 65.92 | 70.94 |
 | gemini-2.0-flash-lite-001 | 65.26 | 75.12 | 72.73 | 65.50 | 69.65 |
+| qwen/qwen2.5-vl-32b-instruct | 62.93 | 75.91 | 68.39 | 65.16 | 68.10 |
 | gemini-2.0-flash-lite-preview-02-05 | 64.66 | 73.48 | 70.00 | 63.90 | 68.01 |
-| meta-llama/llama-3.2-90b-vision-instruct | 68.93 | 75.77 | 67.19 | 59.79 | 67.92 |
-| qwen/qwen2.5-vl-32b-instruct | 62.93 | 79.24 | 63.61 | 65.16 | 67.73 |
 | Qwen/QwQ-32B-Preview | 57.98 | 70.94 | 72.87 | 63.59 | 66.35 |
+| meta-llama/llama-3.2-90b-vision-instruct | 61.70 | 72.33 | 67.19 | 61.51 | 65.68 |
 | claude-3-opus-20240229 | 60.34 | 70.12 | 67.43 | 62.32 | 65.05 |
 | gemini-1.5-pro | 61.84 | 70.29 | 66.18 | 60.30 | 64.65 |
 | gpt-4o-mini-2024-07-18 | 55.01 | 67.09 | 73.16 | 61.36 | 64.15 |
 | mistralai/Mistral-Small-24B-Instruct-2501 | 54.56 | 68.32 | 73.25 | 59.25 | 63.85 |
-| llama-3.1-70b
+| meta-llama/llama-3.1-70b | 64.94 | 70.14 | 58.63 | 61.33 | 63.76 |
 | Qwen/Qwen2.5-7B-Instruct-Turbo | 54.42 | 64.51 | 68.01 | 58.83 | 61.44 |
 | yentinglin/Llama-3-Taiwan-8B-Instruct | 61.51 | 67.61 | 52.05 | 58.60 | 59.94 |
-| meta-llama/llama-4-scout | 53.86 |
-| google/gemma-3-27b-it |
+| meta-llama/llama-4-scout | 53.86 | 62.02 | 60.76 | 58.06 | 58.68 |
+| google/gemma-3-27b-it | 55.57 | 58.32 | 55.65 | 49.74 | 54.82 |
 | claude-3-sonnet-20240229 | 52.06 | 59.38 | 49.87 | 51.64 | 53.24 |
-| meta-llama/llama-4-maverick | 52.53 | 49.88 | 54.82 | 51.41 | 52.16 |
 | Qwen2-7B-Instruct | 55.66 | 66.40 | 27.18 | 55.32 | 51.14 |
+| meta-llama/llama-4-maverick | 44.67 | 50.96 | 55.19 | 51.34 | 50.54 |
 | gemma2-9b-it | 45.38 | 55.76 | 49.89 | 48.92 | 49.99 |
 | claude-3-haiku-20240307 | 47.48 | 54.48 | 48.47 | 48.77 | 49.80 |
 | gemini-1.5-flash | 42.99 | 53.42 | 53.47 | 46.56 | 49.11 |
@@ -2295,6 +2295,7 @@ The TMMLU+ dataset is six times larger and contains more balanced subjects compa
 | taide/Llama3-TAIDE-LX-8B-Chat-Alpha1 | 27.02 | 36.64 | 25.33 | 27.96 | 29.24 |


+
 ## How to use

 ```python
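The second hunk's context stops at the opening of the README's "How to use" Python block. For orientation, here is a minimal sketch of loading one TMMLU+ subject with the `datasets` library; the `ikala/tmmluplus` dataset id, the `engineering_math` config, and the `test` split are assumptions for illustration and are not taken from this change.

```python
# Minimal sketch: load a single TMMLU+ subject split from the Hugging Face Hub.
# The dataset id, config name, and split below are illustrative assumptions.
from datasets import load_dataset

dataset = load_dataset("ikala/tmmluplus", "engineering_math", split="test")
print(dataset[0])  # one multiple-choice item: question, options, and answer key
```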