{ "config_general": { "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": "auto:6", "max_samples": "null", "job_id": "", "model_name": "mistralai/Mistral-7B-Instruct-v0.2", "model_sha": "", "model_dtype": "torch.bfloat16", "model_size": "" }, "results": { "harness|truthfulqa_mc2_m_de|0": { "acc,none": 0.3769035532994924, "acc_stderr,none": 0.017274496070731636, "alias": "truthfulqa_mc2_m_de" }, "harness|truthfulqa_mc2_m_es|0": { "acc,none": 0.37389100126742714, "acc_stderr,none": 0.017235916064634944, "alias": "truthfulqa_mc2_m_es" }, "harness|arc_challenge_m_it|25": { "acc,none": 0.4696321642429427, "acc_stderr,none": 0.014603134472811797, "acc_norm,none": 0.5192472198460223, "acc_norm_stderr,none": 0.01461929974758558, "alias": "arc_challenge_m_it" }, "harness|mmlu_m_de|5": { "acc,none": 0.5019610801025796, "acc_stderr,none": 0.004342542081895657, "alias": "mmlu_m_de" }, "harness|belebele_ita_Latn|5": { "acc,none": 0.7033333333333334, "acc_stderr,none": 0.015234742930687583, "acc_norm,none": 0.7033333333333334, "acc_norm_stderr,none": 0.015234742930687583, "alias": "belebele_ita_Latn" }, "harness|mmlu_m_fr|5": { "acc,none": 0.5068367580780689, "acc_stderr,none": 0.004369780057319525, "alias": "mmlu_m_fr" }, "harness|belebele_eng_Latn|5": { "acc,none": 0.8244444444444444, "acc_stderr,none": 0.012688437383538148, "acc_norm,none": 0.8244444444444444, "acc_norm_stderr,none": 0.012688437383538148, "alias": "belebele_eng_Latn" }, "harness|truthfulqa_mc2_m_it|0": { "acc,none": 0.3499361430395913, "acc_stderr,none": 0.017055679797150423, "alias": "truthfulqa_mc2_m_it" }, "harness|arc_challenge_m_de|25": { "acc,none": 0.4388366124893071, "acc_stderr,none": 0.014520269253779024, "acc_norm,none": 0.48588537211291705, "acc_norm_stderr,none": 0.014624312923785979, "alias": "arc_challenge_m_de" }, "harness|mmlu_m_es|5": { "acc,none": 0.5116994150292485, "acc_stderr,none": 0.004328995585776224, "alias": "mmlu_m_es" }, "harness|arc_challenge_m_es|25": { "acc,none": 0.5128205128205128, "acc_stderr,none": 0.014619076398321508, "acc_norm,none": 0.5418803418803418, "acc_norm_stderr,none": 0.014572494714128992, "alias": "arc_challenge_m_es" }, "harness|belebele_fra_Latn|5": { "acc,none": 0.7588888888888888, "acc_stderr,none": 0.014266513886578925, "acc_norm,none": 0.7588888888888888, "acc_norm_stderr,none": 0.014266513886578925, "alias": "belebele_fra_Latn" }, "harness|arc_challenge_m_fr|25": { "acc,none": 0.5029940119760479, "acc_stderr,none": 0.01462988110480259, "acc_norm,none": 0.5517536355859709, "acc_norm_stderr,none": 0.014551560465785601, "alias": "arc_challenge_m_fr" }, "harness|belebele_spa_Latn|5": { "acc,none": 0.73, "acc_stderr,none": 0.014806876915962119, "acc_norm,none": 0.73, "acc_norm_stderr,none": 0.014806876915962119, "alias": "belebele_spa_Latn" }, "harness|mmlu_m_it|5": { "acc,none": 0.4990556772682632, "acc_stderr,none": 0.004346011293413894, "alias": "mmlu_m_it" }, "harness|arc_challenge|25": { "acc,none": 0.5802047781569966, "acc_stderr,none": 0.014422181226303026, "acc_norm,none": 0.6373720136518771, "acc_norm_stderr,none": 0.01404910656495501, "alias": "arc_challenge" }, "harness|hendrycksTest|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-humanities|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-formal_logic|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": 
"mmlu" }, "harness|hendrycksTest-high_school_european_history|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_us_history|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_world_history|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-international_law|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-jurisprudence|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-logical_fallacies|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-moral_disputes|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-moral_scenarios|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-philosophy|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-prehistory|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-professional_law|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-world_religions|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-other|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-business_ethics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-clinical_knowledge|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-college_medicine|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-global_facts|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-human_aging|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-management|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-marketing|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-medical_genetics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-miscellaneous|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-nutrition|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-professional_accounting|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-professional_medicine|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-virology|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, 
"harness|hendrycksTest-social_sciences|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-econometrics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_geography|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_psychology|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-human_sexuality|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-professional_psychology|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-public_relations|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-security_studies|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-sociology|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-us_foreign_policy|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-stem|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-abstract_algebra|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-anatomy|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-astronomy|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-college_biology|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-college_chemistry|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-college_computer_science|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-college_mathematics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-college_physics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-computer_security|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-conceptual_physics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-electrical_engineering|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-elementary_mathematics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 
0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_biology|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_chemistry|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_computer_science|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_mathematics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_physics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-high_school_statistics|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hendrycksTest-machine_learning|5": { "acc,none": 0.5920096852300242, "acc_stderr,none": 0.1327080584112874, "alias": "mmlu" }, "harness|hellaswag|10": { "acc,none": 0.6596295558653654, "acc_stderr,none": 0.004728653488866953, "acc_norm,none": 0.8463453495319657, "acc_norm_stderr,none": 0.0035988038554605317, "alias": "hellaswag" }, "harness|hellaswag_es|10": { "acc,none": 0.5163217409857052, "acc_stderr,none": 0.005161776321030031, "acc_norm,none": 0.6854064433539577, "acc_norm_stderr,none": 0.004796337051943834, "alias": "hellaswag_es" }, "harness|hellaswag_de|10": { "acc,none": 0.47171221178479933, "acc_stderr,none": 0.005157907928882325, "acc_norm,none": 0.6224380871050385, "acc_norm_stderr,none": 0.0050088947767330414, "alias": "hellaswag_de" }, "harness|hellaswag_it|10": { "acc,none": 0.4816708365060372, "acc_stderr,none": 0.005211622983028933, "acc_norm,none": 0.6393995431306428, "acc_norm_stderr,none": 0.00500834559575331, "alias": "hellaswag_it" }, "harness|truthfulqa_mc2_m_fr|0": { "acc,none": 0.38246505717916135, "acc_stderr,none": 0.01733466196278422, "alias": "truthfulqa_mc2_m_fr" }, "harness|truthfulqa_mc2|0": { "acc,none": 0.668115596446337, "acc_stderr,none": 0.015254193275099396, "alias": "truthfulqa_mc2" }, "harness|hellaswag_fr|10": { "acc,none": 0.5099593060612551, "acc_stderr,none": 0.005173448788809984, "acc_norm,none": 0.679160419790105, "acc_norm_stderr,none": 0.004830882499454409, "alias": "hellaswag_fr" }, "harness|belebele_deu_Latn|5": { "acc,none": 0.6888888888888889, "acc_stderr,none": 0.015440185195103378, "acc_norm,none": 0.6888888888888889, "acc_norm_stderr,none": 0.015440185195103378, "alias": "belebele_deu_Latn" } }, "versions": { "harness|truthfulqa_mc2_m_de|0": "Yaml", "harness|truthfulqa_mc2_m_es|0": "Yaml", "harness|arc_challenge_m_it|25": 1.0, "harness|mmlu_m_de|5": "Yaml", "harness|belebele_ita_Latn|5": 0.0, "harness|mmlu_m_fr|5": "Yaml", "harness|belebele_eng_Latn|5": 0.0, "harness|truthfulqa_mc2_m_it|0": "Yaml", "harness|arc_challenge_m_de|25": 1.0, "harness|mmlu_m_es|5": "Yaml", "harness|arc_challenge_m_es|25": 1.0, "harness|belebele_fra_Latn|5": 0.0, "harness|arc_challenge_m_fr|25": 1.0, "harness|belebele_spa_Latn|5": 0.0, "harness|mmlu_m_it|5": "Yaml", "harness|arc_challenge|25": 1.0, "harness|hendrycksTest|5": "N/A", "harness|hendrycksTest-humanities|5": "N/A", "harness|hendrycksTest-formal_logic|5": "N/A", "harness|hendrycksTest-high_school_european_history|5": "N/A", "harness|hendrycksTest-high_school_us_history|5": "N/A", "harness|hendrycksTest-high_school_world_history|5": "N/A", "harness|hendrycksTest-international_law|5": "N/A", 
"harness|hendrycksTest-jurisprudence|5": "N/A", "harness|hendrycksTest-logical_fallacies|5": "N/A", "harness|hendrycksTest-moral_disputes|5": "N/A", "harness|hendrycksTest-moral_scenarios|5": "N/A", "harness|hendrycksTest-philosophy|5": "N/A", "harness|hendrycksTest-prehistory|5": "N/A", "harness|hendrycksTest-professional_law|5": "N/A", "harness|hendrycksTest-world_religions|5": "N/A", "harness|hendrycksTest-other|5": "N/A", "harness|hendrycksTest-business_ethics|5": "N/A", "harness|hendrycksTest-clinical_knowledge|5": "N/A", "harness|hendrycksTest-college_medicine|5": "N/A", "harness|hendrycksTest-global_facts|5": "N/A", "harness|hendrycksTest-human_aging|5": "N/A", "harness|hendrycksTest-management|5": "N/A", "harness|hendrycksTest-marketing|5": "N/A", "harness|hendrycksTest-medical_genetics|5": "N/A", "harness|hendrycksTest-miscellaneous|5": "N/A", "harness|hendrycksTest-nutrition|5": "N/A", "harness|hendrycksTest-professional_accounting|5": "N/A", "harness|hendrycksTest-professional_medicine|5": "N/A", "harness|hendrycksTest-virology|5": "N/A", "harness|hendrycksTest-social_sciences|5": "N/A", "harness|hendrycksTest-econometrics|5": "N/A", "harness|hendrycksTest-high_school_geography|5": "N/A", "harness|hendrycksTest-high_school_government_and_politics|5": "N/A", "harness|hendrycksTest-high_school_macroeconomics|5": "N/A", "harness|hendrycksTest-high_school_microeconomics|5": "N/A", "harness|hendrycksTest-high_school_psychology|5": "N/A", "harness|hendrycksTest-human_sexuality|5": "N/A", "harness|hendrycksTest-professional_psychology|5": "N/A", "harness|hendrycksTest-public_relations|5": "N/A", "harness|hendrycksTest-security_studies|5": "N/A", "harness|hendrycksTest-sociology|5": "N/A", "harness|hendrycksTest-us_foreign_policy|5": "N/A", "harness|hendrycksTest-stem|5": "N/A", "harness|hendrycksTest-abstract_algebra|5": "N/A", "harness|hendrycksTest-anatomy|5": "N/A", "harness|hendrycksTest-astronomy|5": "N/A", "harness|hendrycksTest-college_biology|5": "N/A", "harness|hendrycksTest-college_chemistry|5": "N/A", "harness|hendrycksTest-college_computer_science|5": "N/A", "harness|hendrycksTest-college_mathematics|5": "N/A", "harness|hendrycksTest-college_physics|5": "N/A", "harness|hendrycksTest-computer_security|5": "N/A", "harness|hendrycksTest-conceptual_physics|5": "N/A", "harness|hendrycksTest-electrical_engineering|5": "N/A", "harness|hendrycksTest-elementary_mathematics|5": "N/A", "harness|hendrycksTest-high_school_biology|5": "N/A", "harness|hendrycksTest-high_school_chemistry|5": "N/A", "harness|hendrycksTest-high_school_computer_science|5": "N/A", "harness|hendrycksTest-high_school_mathematics|5": "N/A", "harness|hendrycksTest-high_school_physics|5": "N/A", "harness|hendrycksTest-high_school_statistics|5": "N/A", "harness|hendrycksTest-machine_learning|5": "N/A", "harness|hellaswag|10": 1.0, "harness|hellaswag_es|10": 1.0, "harness|hellaswag_de|10": 1.0, "harness|hellaswag_it|10": 1.0, "harness|truthfulqa_mc2_m_fr|0": "Yaml", "harness|truthfulqa_mc2|0": 2.0, "harness|hellaswag_fr|10": 1.0, "harness|belebele_deu_Latn|5": 0.0 } }