euro-llm-leaderboard-requests/croissantllm/CroissantLLMChat-v0.1/results_2024_05_31T12-01-50.json
{
  "config_general": {
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": "auto:6",
    "max_samples": "null",
    "job_id": "",
    "model_name": "croissantllm/CroissantLLMChat-v0.1",
    "model_sha": "",
    "model_dtype": "torch.bfloat16",
    "model_size": ""
  },
  "results": {
    "harness|hellaswag_it|10": {
      "acc,none": 0.30218644620907215,
      "acc_stderr,none": 0.004789632401308303,
      "acc_norm,none": 0.34439247253344935,
      "acc_norm_stderr,none": 0.004956142479858007,
      "alias": "hellaswag_it"
    },
    "harness|belebele_spa_Latn|5": {
      "acc,none": 0.25666666666666665,
      "acc_stderr,none": 0.014567891342380039,
      "acc_norm,none": 0.25666666666666665,
      "acc_norm_stderr,none": 0.014567891342380039,
      "alias": "belebele_spa_Latn"
    },
    "harness|arc_challenge_m_it|25": {
      "acc,none": 0.22583404619332764,
      "acc_stderr,none": 0.012234615366330675,
      "acc_norm,none": 0.262617621899059,
      "acc_norm_stderr,none": 0.012876175520452837,
      "alias": "arc_challenge_m_it"
    },
    "harness|arc_challenge_m_fr|25": {
      "acc,none": 0.2840034217279726,
      "acc_stderr,none": 0.013194588131940972,
      "acc_norm,none": 0.32677502138579984,
      "acc_norm_stderr,none": 0.013724076021999806,
      "alias": "arc_challenge_m_fr"
    },
    "harness|belebele_deu_Latn|5": {
      "acc,none": 0.2388888888888889,
      "acc_stderr,none": 0.0142213937312762,
      "acc_norm,none": 0.2388888888888889,
      "acc_norm_stderr,none": 0.0142213937312762,
      "alias": "belebele_deu_Latn"
    },
    "harness|hellaswag_es|10": {
      "acc,none": 0.32099423938553445,
      "acc_stderr,none": 0.0048222091210664165,
      "acc_norm,none": 0.36921271602304245,
      "acc_norm_stderr,none": 0.004984716707710809,
      "alias": "hellaswag_es"
    },
    "harness|arc_challenge_m_de|25": {
      "acc,none": 0.19931565440547477,
      "acc_stderr,none": 0.011689069809394484,
      "acc_norm,none": 0.2446535500427716,
      "acc_norm_stderr,none": 0.01257845892181575,
      "alias": "arc_challenge_m_de"
    },
    "harness|belebele_ita_Latn|5": {
      "acc,none": 0.2911111111111111,
      "acc_stderr,none": 0.015150906906440088,
      "acc_norm,none": 0.2911111111111111,
      "acc_norm_stderr,none": 0.015150906906440088,
      "alias": "belebele_ita_Latn"
    },
    "harness|truthfulqa_mc2_m_de|0": {
      "acc,none": 0.20558375634517767,
      "acc_stderr,none": 0.014405591330836916,
      "alias": "truthfulqa_mc2_m_de"
    },
    "harness|hendrycksTest|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-humanities|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-formal_logic|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_european_history|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_us_history|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_world_history|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-international_law|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-jurisprudence|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-logical_fallacies|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-moral_disputes|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-moral_scenarios|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-philosophy|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-prehistory|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_law|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-world_religions|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-other|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-business_ethics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_medicine|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-global_facts|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-human_aging|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-management|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-marketing|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-medical_genetics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-miscellaneous|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-nutrition|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_accounting|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_medicine|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-virology|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-social_sciences|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-econometrics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_geography|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_psychology|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-human_sexuality|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_psychology|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-public_relations|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-security_studies|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-sociology|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-stem|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-abstract_algebra|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-anatomy|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-astronomy|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_biology|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_chemistry|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_computer_science|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_mathematics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_physics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-computer_security|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-conceptual_physics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-electrical_engineering|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_biology|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_physics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_statistics|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-machine_learning|5": {
      "acc,none": 0.25081897165645917,
      "acc_stderr,none": 0.03887949265403574,
      "alias": "mmlu"
    },
    "harness|arc_challenge|25": {
      "acc,none": 0.302901023890785,
      "acc_stderr,none": 0.013428241573185349,
      "acc_norm,none": 0.3250853242320819,
      "acc_norm_stderr,none": 0.013688147309729117,
      "alias": "arc_challenge"
    },
    "harness|hellaswag|10": {
      "acc,none": 0.4283011352320255,
      "acc_stderr,none": 0.004938212723748209,
      "acc_norm,none": 0.5612427803226449,
      "acc_norm_stderr,none": 0.0049522098318565775,
      "alias": "hellaswag"
    },
    "harness|hellaswag_de|10": {
      "acc,none": 0.28949615713065757,
      "acc_stderr,none": 0.004686022365371444,
      "acc_norm,none": 0.3123398804440649,
      "acc_norm_stderr,none": 0.0047885097822260945,
      "alias": "hellaswag_de"
    },
    "harness|belebele_fra_Latn|5": {
      "acc,none": 0.26,
      "acc_stderr,none": 0.014629271097998376,
      "acc_norm,none": 0.26,
      "acc_norm_stderr,none": 0.014629271097998376,
      "alias": "belebele_fra_Latn"
    },
    "harness|truthfulqa_mc2_m_es|0": {
      "acc,none": 0.22433460076045628,
      "acc_stderr,none": 0.01486011711896892,
      "alias": "truthfulqa_mc2_m_es"
    },
    "harness|belebele_eng_Latn|5": {
      "acc,none": 0.28888888888888886,
      "acc_stderr,none": 0.015116606414982333,
      "acc_norm,none": 0.28888888888888886,
      "acc_norm_stderr,none": 0.015116606414982333,
      "alias": "belebele_eng_Latn"
    },
    "harness|truthfulqa_mc2_m_fr|0": {
      "acc,none": 0.19822109275730623,
      "acc_stderr,none": 0.014219717662860107,
      "alias": "truthfulqa_mc2_m_fr"
    },
    "harness|truthfulqa_mc2|0": {
      "acc,none": 0.3965607460783958,
      "acc_stderr,none": 0.014745046690858642,
      "alias": "truthfulqa_mc2"
    },
    "harness|mmlu_m_es|5": {
      "acc,none": 0.2496625168741563,
      "acc_stderr,none": 0.003748357919855524,
      "alias": "mmlu_m_es"
    },
    "harness|mmlu_m_de|5": {
      "acc,none": 0.23472620304721678,
      "acc_stderr,none": 0.0036810064660393487,
      "alias": "mmlu_m_de"
    },
    "harness|truthfulqa_mc2_m_it|0": {
      "acc,none": 0.21328224776500637,
      "acc_stderr,none": 0.014648172749593518,
      "alias": "truthfulqa_mc2_m_it"
    },
    "harness|mmlu_m_it|5": {
      "acc,none": 0.24174661932462038,
      "acc_stderr,none": 0.0037214227061056937,
      "alias": "mmlu_m_it"
    },
    "harness|mmlu_m_fr|5": {
      "acc,none": 0.24352608662439845,
      "acc_stderr,none": 0.00375145636866583,
      "alias": "mmlu_m_fr"
    },
    "harness|arc_challenge_m_es|25": {
      "acc,none": 0.2299145299145299,
      "acc_stderr,none": 0.012306807801471982,
      "acc_norm,none": 0.252991452991453,
      "acc_norm_stderr,none": 0.01271476841834667,
      "alias": "arc_challenge_m_es"
    },
    "harness|hellaswag_fr|10": {
      "acc,none": 0.4185050331976869,
      "acc_stderr,none": 0.005105280908505716,
      "acc_norm,none": 0.5326622403084172,
      "acc_norm_stderr,none": 0.005163423088888286,
      "alias": "hellaswag_fr"
    }
  },
  "versions": {
    "harness|hellaswag_it|10": 1.0,
    "harness|belebele_spa_Latn|5": 0.0,
    "harness|arc_challenge_m_it|25": 1.0,
    "harness|arc_challenge_m_fr|25": 1.0,
    "harness|belebele_deu_Latn|5": 0.0,
    "harness|hellaswag_es|10": 1.0,
    "harness|arc_challenge_m_de|25": 1.0,
    "harness|belebele_ita_Latn|5": 0.0,
    "harness|truthfulqa_mc2_m_de|0": "Yaml",
    "harness|hendrycksTest|5": "N/A",
    "harness|hendrycksTest-humanities|5": "N/A",
    "harness|hendrycksTest-formal_logic|5": "N/A",
    "harness|hendrycksTest-high_school_european_history|5": "N/A",
    "harness|hendrycksTest-high_school_us_history|5": "N/A",
    "harness|hendrycksTest-high_school_world_history|5": "N/A",
    "harness|hendrycksTest-international_law|5": "N/A",
    "harness|hendrycksTest-jurisprudence|5": "N/A",
    "harness|hendrycksTest-logical_fallacies|5": "N/A",
    "harness|hendrycksTest-moral_disputes|5": "N/A",
    "harness|hendrycksTest-moral_scenarios|5": "N/A",
    "harness|hendrycksTest-philosophy|5": "N/A",
    "harness|hendrycksTest-prehistory|5": "N/A",
    "harness|hendrycksTest-professional_law|5": "N/A",
    "harness|hendrycksTest-world_religions|5": "N/A",
    "harness|hendrycksTest-other|5": "N/A",
    "harness|hendrycksTest-business_ethics|5": "N/A",
    "harness|hendrycksTest-clinical_knowledge|5": "N/A",
    "harness|hendrycksTest-college_medicine|5": "N/A",
    "harness|hendrycksTest-global_facts|5": "N/A",
    "harness|hendrycksTest-human_aging|5": "N/A",
    "harness|hendrycksTest-management|5": "N/A",
    "harness|hendrycksTest-marketing|5": "N/A",
    "harness|hendrycksTest-medical_genetics|5": "N/A",
    "harness|hendrycksTest-miscellaneous|5": "N/A",
    "harness|hendrycksTest-nutrition|5": "N/A",
    "harness|hendrycksTest-professional_accounting|5": "N/A",
    "harness|hendrycksTest-professional_medicine|5": "N/A",
    "harness|hendrycksTest-virology|5": "N/A",
    "harness|hendrycksTest-social_sciences|5": "N/A",
    "harness|hendrycksTest-econometrics|5": "N/A",
    "harness|hendrycksTest-high_school_geography|5": "N/A",
    "harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
    "harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
    "harness|hendrycksTest-high_school_microeconomics|5": "N/A",
    "harness|hendrycksTest-high_school_psychology|5": "N/A",
    "harness|hendrycksTest-human_sexuality|5": "N/A",
    "harness|hendrycksTest-professional_psychology|5": "N/A",
    "harness|hendrycksTest-public_relations|5": "N/A",
    "harness|hendrycksTest-security_studies|5": "N/A",
    "harness|hendrycksTest-sociology|5": "N/A",
    "harness|hendrycksTest-us_foreign_policy|5": "N/A",
    "harness|hendrycksTest-stem|5": "N/A",
    "harness|hendrycksTest-abstract_algebra|5": "N/A",
    "harness|hendrycksTest-anatomy|5": "N/A",
    "harness|hendrycksTest-astronomy|5": "N/A",
    "harness|hendrycksTest-college_biology|5": "N/A",
    "harness|hendrycksTest-college_chemistry|5": "N/A",
    "harness|hendrycksTest-college_computer_science|5": "N/A",
    "harness|hendrycksTest-college_mathematics|5": "N/A",
    "harness|hendrycksTest-college_physics|5": "N/A",
    "harness|hendrycksTest-computer_security|5": "N/A",
    "harness|hendrycksTest-conceptual_physics|5": "N/A",
    "harness|hendrycksTest-electrical_engineering|5": "N/A",
    "harness|hendrycksTest-elementary_mathematics|5": "N/A",
    "harness|hendrycksTest-high_school_biology|5": "N/A",
    "harness|hendrycksTest-high_school_chemistry|5": "N/A",
    "harness|hendrycksTest-high_school_computer_science|5": "N/A",
    "harness|hendrycksTest-high_school_mathematics|5": "N/A",
    "harness|hendrycksTest-high_school_physics|5": "N/A",
    "harness|hendrycksTest-high_school_statistics|5": "N/A",
    "harness|hendrycksTest-machine_learning|5": "N/A",
    "harness|arc_challenge|25": 1.0,
    "harness|hellaswag|10": 1.0,
    "harness|hellaswag_de|10": 1.0,
    "harness|belebele_fra_Latn|5": 0.0,
    "harness|truthfulqa_mc2_m_es|0": "Yaml",
    "harness|belebele_eng_Latn|5": 0.0,
    "harness|truthfulqa_mc2_m_fr|0": "Yaml",
    "harness|truthfulqa_mc2|0": 2.0,
    "harness|mmlu_m_es|5": "Yaml",
    "harness|mmlu_m_de|5": "Yaml",
    "harness|truthfulqa_mc2_m_it|0": "Yaml",
    "harness|mmlu_m_it|5": "Yaml",
    "harness|mmlu_m_fr|5": "Yaml",
    "harness|arc_challenge_m_es|25": 1.0,
    "harness|hellaswag_fr|10": 1.0
  }
}