tabedini committed on
Commit
f201b78
·
verified ·
1 Parent(s): 63b8c43

Update leaderboard_data.jsonl

Browse files
Files changed (1) hide show
  1. leaderboard_data.jsonl +3 -2
leaderboard_data.jsonl CHANGED
@@ -35,8 +35,9 @@
35
  {"Model":"CohereForAI/c4ai-command-r7b-12-2024", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 34.03, "ARC Easy": 77.01, "ARC Challenge": 66.44, "MMLU Pro": 23.62, "AUT Multiple Choice Persian": 50.49, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/c4ai-command-r7b-12-2024"}
36
  {"Model":"PartAI/Llama3.2-1B-Part-v1", "Precision": "bfloat16", "#Params (B)": 1.24, "Part Multiple Choice": 0, "ARC Easy": 22.70, "ARC Challenge" : 25.50, "MMLU Pro": 7.15, "AUT Multiple Choice Persian": 25.24, "Hub License": "llama3.2", "Model sha": "main", "model_name_for_query": "PartAI/Llama3.2-1B-Part-v1"}
37
  {"Model":"CohereForAI/c4ai-command-a-03-2025", "Precision": "bfloat16", "#Params (B)": 111, "Part Multiple Choice": 49.49, "ARC Easy": 96.55, "ARC Challenge": 86.57, "MMLU Pro": 43.43, "AUT Multiple Choice Persian": 70.21, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/c4ai-command-a-03-2025"}
38
- {"Model":"meta-llama/Llama-4-Scout-17B-16E-Instruct", "Precision": "bfloat16", "#Params (B)": 109, "Part Multiple Choice": 0, "ARC Easy": 0, "ARC Challenge": 0, "MMLU Pro": 39.14, "AUT Multiple Choice Persian": 0, "Hub License": "llama4", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-4-Scout-17B-16E-Instruct"}
39
  {"Model":"google/gemma-3-1b-it", "Precision": "bfloat16", "#Params (B)": 1, "Part Multiple Choice": 27.15, "ARC Easy": 41.67, "ARC Challenge": 31.54, "MMLU Pro": 16.23, "AUT Multiple Choice Persian": 36.88, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-1b-it"}
40
  {"Model":"google/gemma-3-4b-it", "Precision": "bfloat16", "#Params (B)": 4.3, "Part Multiple Choice": 34.40, "ARC Easy": 77.01, "ARC Challenge": 63.76, "MMLU Pro": 19.81, "AUT Multiple Choice Persian": 50.30, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-4b-it"}
41
  {"Model":"google/gemma-3-12b-it", "Precision": "bfloat16", "#Params (B)": 12.2, "Part Multiple Choice": 43.52, "ARC Easy": 93.39, "ARC Challenge": 81.21, "MMLU Pro": 29.36, "AUT Multiple Choice Persian": 57.00, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-12b-it"}
42
- {"Model":"google/gemma-3-27b-it", "Precision": "bfloat16", "#Params (B)": 27.4, "Part Multiple Choice": 48.56, "ARC Easy": 95.69, "ARC Challenge": 90.60, "MMLU Pro": 40.10, "AUT Multiple Choice Persian": 64.30, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-27b-it"}
 
 
35
  {"Model":"CohereForAI/c4ai-command-r7b-12-2024", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 34.03, "ARC Easy": 77.01, "ARC Challenge": 66.44, "MMLU Pro": 23.62, "AUT Multiple Choice Persian": 50.49, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/c4ai-command-r7b-12-2024"}
36
  {"Model":"PartAI/Llama3.2-1B-Part-v1", "Precision": "bfloat16", "#Params (B)": 1.24, "Part Multiple Choice": 0, "ARC Easy": 22.70, "ARC Challenge" : 25.50, "MMLU Pro": 7.15, "AUT Multiple Choice Persian": 25.24, "Hub License": "llama3.2", "Model sha": "main", "model_name_for_query": "PartAI/Llama3.2-1B-Part-v1"}
37
  {"Model":"CohereForAI/c4ai-command-a-03-2025", "Precision": "bfloat16", "#Params (B)": 111, "Part Multiple Choice": 49.49, "ARC Easy": 96.55, "ARC Challenge": 86.57, "MMLU Pro": 43.43, "AUT Multiple Choice Persian": 70.21, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/c4ai-command-a-03-2025"}
38
+ {"Model":"meta-llama/Llama-4-Scout-17B-16E-Instruct", "Precision": "bfloat16", "#Params (B)": 109, "Part Multiple Choice": 53.75, "ARC Easy": 94.83, "ARC Challenge": 88.59, "MMLU Pro": 39.14, "AUT Multiple Choice Persian": 66.86, "Hub License": "llama4", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-4-Scout-17B-16E-Instruct"}
39
  {"Model":"google/gemma-3-1b-it", "Precision": "bfloat16", "#Params (B)": 1, "Part Multiple Choice": 27.15, "ARC Easy": 41.67, "ARC Challenge": 31.54, "MMLU Pro": 16.23, "AUT Multiple Choice Persian": 36.88, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-1b-it"}
40
  {"Model":"google/gemma-3-4b-it", "Precision": "bfloat16", "#Params (B)": 4.3, "Part Multiple Choice": 34.40, "ARC Easy": 77.01, "ARC Challenge": 63.76, "MMLU Pro": 19.81, "AUT Multiple Choice Persian": 50.30, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-4b-it"}
41
  {"Model":"google/gemma-3-12b-it", "Precision": "bfloat16", "#Params (B)": 12.2, "Part Multiple Choice": 43.52, "ARC Easy": 93.39, "ARC Challenge": 81.21, "MMLU Pro": 29.36, "AUT Multiple Choice Persian": 57.00, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-12b-it"}
42
+ {"Model":"google/gemma-3-27b-it", "Precision": "bfloat16", "#Params (B)": 27.4, "Part Multiple Choice": 48.56, "ARC Easy": 95.69, "ARC Challenge": 90.60, "MMLU Pro": 40.10, "AUT Multiple Choice Persian": 64.30, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-3-27b-it"}
43
+ {"Model":"Qwen/QwQ-32B", "Precision": "bfloat16", "#Params (B)": 32.8, "Part Multiple Choice": 46.48, "ARC Easy": 89.37, "ARC Challenge": 82.55, "MMLU Pro": 35.32, "AUT Multiple Choice Persian": 57.40, "Hub License": "apache-2.0", "Model sha": "main", "model_name_for_query": "Qwen/QwQ-32B"}