euro-llm-leaderboard-requests/seedboxai/Llama-3-KafkaLM-8B-v0.1/results_2024_05_31T12-04-14.json
{
  "config_general": {
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": "auto:6",
    "max_samples": null,
    "job_id": "",
    "model_name": "seedboxai/Llama-3-KafkaLM-8B-v0.1",
    "model_sha": "",
    "model_dtype": "torch.bfloat16",
    "model_size": ""
  },
"results": { | |
"harness|truthfulqa_mc2_m_de|0": { | |
"acc,none": 0.3032994923857868, | |
"acc_stderr,none": 0.016385946744217592, | |
"alias": "truthfulqa_mc2_m_de" | |
}, | |
"harness|truthfulqa_mc2_m_es|0": { | |
"acc,none": 0.3041825095057034, | |
"acc_stderr,none": 0.01638897078747584, | |
"alias": "truthfulqa_mc2_m_es" | |
}, | |
"harness|arc_challenge_m_it|25": { | |
"acc,none": 0.5483319076133447, | |
"acc_stderr,none": 0.01456163173396195, | |
"acc_norm,none": 0.58169375534645, | |
"acc_norm_stderr,none": 0.01443354309879438, | |
"alias": "arc_challenge_m_it" | |
}, | |
"harness|mmlu_m_de|5": { | |
"acc,none": 0.5758032885804797, | |
"acc_stderr,none": 0.004292379291831065, | |
"alias": "mmlu_m_de" | |
}, | |
"harness|belebele_ita_Latn|5": { | |
"acc,none": 0.8466666666666667, | |
"acc_stderr,none": 0.012016961604722178, | |
"acc_norm,none": 0.8466666666666667, | |
"acc_norm_stderr,none": 0.012016961604722178, | |
"alias": "belebele_ita_Latn" | |
}, | |
"harness|mmlu_m_fr|5": { | |
"acc,none": 0.5828431747001757, | |
"acc_stderr,none": 0.004309786050228465, | |
"alias": "mmlu_m_fr" | |
}, | |
"harness|belebele_eng_Latn|5": { | |
"acc,none": 0.9077777777777778, | |
"acc_stderr,none": 0.00965000901695865, | |
"acc_norm,none": 0.9077777777777778, | |
"acc_norm_stderr,none": 0.00965000901695865, | |
"alias": "belebele_eng_Latn" | |
}, | |
"harness|truthfulqa_mc2_m_it|0": { | |
"acc,none": 0.2988505747126437, | |
"acc_stderr,none": 0.016369256815093124, | |
"alias": "truthfulqa_mc2_m_it" | |
}, | |
"harness|arc_challenge_m_de|25": { | |
"acc,none": 0.5295124037639007, | |
"acc_stderr,none": 0.014604635981218392, | |
"acc_norm,none": 0.5765611633875107, | |
"acc_norm_stderr,none": 0.014457613658173296, | |
"alias": "arc_challenge_m_de" | |
}, | |
"harness|mmlu_m_es|5": { | |
"acc,none": 0.5836958152092395, | |
"acc_stderr,none": 0.004269084376745433, | |
"alias": "mmlu_m_es" | |
}, | |
"harness|gsm8k|5": { | |
"exact_match,get-answer": 0.7028051554207733, | |
"exact_match_stderr,get-answer": 0.01258868596662417, | |
"alias": "gsm8k" | |
}, | |
"harness|arc_challenge_m_es|25": { | |
"acc,none": 0.5717948717948718, | |
"acc_stderr,none": 0.014472341586181993, | |
"acc_norm,none": 0.6051282051282051, | |
"acc_norm_stderr,none": 0.014296986243361854, | |
"alias": "arc_challenge_m_es" | |
}, | |
"harness|belebele_fra_Latn|5": { | |
"acc,none": 0.8744444444444445, | |
"acc_stderr,none": 0.011051067526018452, | |
"acc_norm,none": 0.8744444444444445, | |
"acc_norm_stderr,none": 0.011051067526018452, | |
"alias": "belebele_fra_Latn" | |
}, | |
"harness|arc_challenge_m_fr|25": { | |
"acc,none": 0.5500427715996579, | |
"acc_stderr,none": 0.014556683049829986, | |
"acc_norm,none": 0.5911035072711719, | |
"acc_norm_stderr,none": 0.014385237598173258, | |
"alias": "arc_challenge_m_fr" | |
}, | |
"harness|belebele_spa_Latn|5": { | |
"acc,none": 0.8533333333333334, | |
"acc_stderr,none": 0.011799000521176608, | |
"acc_norm,none": 0.8533333333333334, | |
"acc_norm_stderr,none": 0.011799000521176608, | |
"alias": "belebele_spa_Latn" | |
}, | |
"harness|mmlu_m_it|5": { | |
"acc,none": 0.5743748583515902, | |
"acc_stderr,none": 0.0042976690467980885, | |
"alias": "mmlu_m_it" | |
}, | |
"harness|arc_challenge|25": { | |
"acc,none": 0.643344709897611, | |
"acc_stderr,none": 0.013998056902620196, | |
"acc_norm,none": 0.6902730375426621, | |
"acc_norm_stderr,none": 0.013512058415238361, | |
"alias": "arc_challenge" | |
}, | |
"harness|hendrycksTest|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-humanities|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-formal_logic|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_european_history|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_us_history|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_world_history|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-international_law|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-jurisprudence|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-logical_fallacies|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-moral_disputes|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-moral_scenarios|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-philosophy|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-prehistory|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-professional_law|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-world_religions|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-other|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-business_ethics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-clinical_knowledge|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-college_medicine|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-global_facts|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-human_aging|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-management|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-marketing|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-medical_genetics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-miscellaneous|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-nutrition|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-professional_accounting|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-professional_medicine|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-virology|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-social_sciences|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-econometrics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_geography|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_government_and_politics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_macroeconomics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_microeconomics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_psychology|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-human_sexuality|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-professional_psychology|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-public_relations|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-security_studies|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-sociology|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-us_foreign_policy|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-stem|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-abstract_algebra|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-anatomy|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-astronomy|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-college_biology|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-college_chemistry|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-college_computer_science|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-college_mathematics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-college_physics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-computer_security|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-conceptual_physics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-electrical_engineering|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-elementary_mathematics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_biology|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_chemistry|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_computer_science|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_mathematics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_physics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-high_school_statistics|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hendrycksTest-machine_learning|5": { | |
"acc,none": 0.6642928357783793, | |
"acc_stderr,none": 0.12992899779775213, | |
"alias": "mmlu" | |
}, | |
"harness|hellaswag|10": { | |
"acc,none": 0.6434973112925712, | |
"acc_stderr,none": 0.00477987225063371, | |
"acc_norm,none": 0.8348934475204143, | |
"acc_norm_stderr,none": 0.0037051790292873567, | |
"alias": "hellaswag" | |
}, | |
"harness|hellaswag_es|10": { | |
"acc,none": 0.5358438233411564, | |
"acc_stderr,none": 0.005151241050693462, | |
"acc_norm,none": 0.7128227010881161, | |
"acc_norm_stderr,none": 0.004673330608913984, | |
"alias": "hellaswag_es" | |
}, | |
"harness|hellaswag_de|10": { | |
"acc,none": 0.5168659265584971, | |
"acc_stderr,none": 0.00516324252754425, | |
"acc_norm,none": 0.6687660119555935, | |
"acc_norm_stderr,none": 0.004863000153513578, | |
"alias": "hellaswag_de" | |
}, | |
"harness|hellaswag_it|10": { | |
"acc,none": 0.5031001849233112, | |
"acc_stderr,none": 0.005215028043959808, | |
"acc_norm,none": 0.6776895463939955, | |
"acc_norm_stderr,none": 0.004874696494784345, | |
"alias": "hellaswag_it" | |
}, | |
"harness|truthfulqa_mc2_m_fr|0": { | |
"acc,none": 0.3138500635324015, | |
"acc_stderr,none": 0.01655233819391967, | |
"alias": "truthfulqa_mc2_m_fr" | |
}, | |
"harness|truthfulqa_mc2|0": { | |
"acc,none": 0.5814245171942886, | |
"acc_stderr,none": 0.015408058423013654, | |
"alias": "truthfulqa_mc2" | |
}, | |
"harness|hellaswag_fr|10": { | |
"acc,none": 0.5233454701220818, | |
"acc_stderr,none": 0.0051688320109455396, | |
"acc_norm,none": 0.6951167273506104, | |
"acc_norm_stderr,none": 0.004764221987906111, | |
"alias": "hellaswag_fr" | |
}, | |
"harness|belebele_deu_Latn|5": { | |
"acc,none": 0.8588888888888889, | |
"acc_stderr,none": 0.01161099002964044, | |
"acc_norm,none": 0.8588888888888889, | |
"acc_norm_stderr,none": 0.01161099002964044, | |
"alias": "belebele_deu_Latn" | |
} | |
}, | |
"versions": { | |
"harness|truthfulqa_mc2_m_de|0": "Yaml", | |
"harness|truthfulqa_mc2_m_es|0": "Yaml", | |
"harness|arc_challenge_m_it|25": 1.0, | |
"harness|mmlu_m_de|5": "Yaml", | |
"harness|belebele_ita_Latn|5": 0.0, | |
"harness|mmlu_m_fr|5": "Yaml", | |
"harness|belebele_eng_Latn|5": 0.0, | |
"harness|truthfulqa_mc2_m_it|0": "Yaml", | |
"harness|arc_challenge_m_de|25": 1.0, | |
"harness|mmlu_m_es|5": "Yaml", | |
"harness|gsm8k|5": 2.0, | |
"harness|arc_challenge_m_es|25": 1.0, | |
"harness|belebele_fra_Latn|5": 0.0, | |
"harness|arc_challenge_m_fr|25": 1.0, | |
"harness|belebele_spa_Latn|5": 0.0, | |
"harness|mmlu_m_it|5": "Yaml", | |
"harness|arc_challenge|25": 1.0, | |
"harness|hendrycksTest|5": "N/A", | |
"harness|hendrycksTest-humanities|5": "N/A", | |
"harness|hendrycksTest-formal_logic|5": "N/A", | |
"harness|hendrycksTest-high_school_european_history|5": "N/A", | |
"harness|hendrycksTest-high_school_us_history|5": "N/A", | |
"harness|hendrycksTest-high_school_world_history|5": "N/A", | |
"harness|hendrycksTest-international_law|5": "N/A", | |
"harness|hendrycksTest-jurisprudence|5": "N/A", | |
"harness|hendrycksTest-logical_fallacies|5": "N/A", | |
"harness|hendrycksTest-moral_disputes|5": "N/A", | |
"harness|hendrycksTest-moral_scenarios|5": "N/A", | |
"harness|hendrycksTest-philosophy|5": "N/A", | |
"harness|hendrycksTest-prehistory|5": "N/A", | |
"harness|hendrycksTest-professional_law|5": "N/A", | |
"harness|hendrycksTest-world_religions|5": "N/A", | |
"harness|hendrycksTest-other|5": "N/A", | |
"harness|hendrycksTest-business_ethics|5": "N/A", | |
"harness|hendrycksTest-clinical_knowledge|5": "N/A", | |
"harness|hendrycksTest-college_medicine|5": "N/A", | |
"harness|hendrycksTest-global_facts|5": "N/A", | |
"harness|hendrycksTest-human_aging|5": "N/A", | |
"harness|hendrycksTest-management|5": "N/A", | |
"harness|hendrycksTest-marketing|5": "N/A", | |
"harness|hendrycksTest-medical_genetics|5": "N/A", | |
"harness|hendrycksTest-miscellaneous|5": "N/A", | |
"harness|hendrycksTest-nutrition|5": "N/A", | |
"harness|hendrycksTest-professional_accounting|5": "N/A", | |
"harness|hendrycksTest-professional_medicine|5": "N/A", | |
"harness|hendrycksTest-virology|5": "N/A", | |
"harness|hendrycksTest-social_sciences|5": "N/A", | |
"harness|hendrycksTest-econometrics|5": "N/A", | |
"harness|hendrycksTest-high_school_geography|5": "N/A", | |
"harness|hendrycksTest-high_school_government_and_politics|5": "N/A", | |
"harness|hendrycksTest-high_school_macroeconomics|5": "N/A", | |
"harness|hendrycksTest-high_school_microeconomics|5": "N/A", | |
"harness|hendrycksTest-high_school_psychology|5": "N/A", | |
"harness|hendrycksTest-human_sexuality|5": "N/A", | |
"harness|hendrycksTest-professional_psychology|5": "N/A", | |
"harness|hendrycksTest-public_relations|5": "N/A", | |
"harness|hendrycksTest-security_studies|5": "N/A", | |
"harness|hendrycksTest-sociology|5": "N/A", | |
"harness|hendrycksTest-us_foreign_policy|5": "N/A", | |
"harness|hendrycksTest-stem|5": "N/A", | |
"harness|hendrycksTest-abstract_algebra|5": "N/A", | |
"harness|hendrycksTest-anatomy|5": "N/A", | |
"harness|hendrycksTest-astronomy|5": "N/A", | |
"harness|hendrycksTest-college_biology|5": "N/A", | |
"harness|hendrycksTest-college_chemistry|5": "N/A", | |
"harness|hendrycksTest-college_computer_science|5": "N/A", | |
"harness|hendrycksTest-college_mathematics|5": "N/A", | |
"harness|hendrycksTest-college_physics|5": "N/A", | |
"harness|hendrycksTest-computer_security|5": "N/A", | |
"harness|hendrycksTest-conceptual_physics|5": "N/A", | |
"harness|hendrycksTest-electrical_engineering|5": "N/A", | |
"harness|hendrycksTest-elementary_mathematics|5": "N/A", | |
"harness|hendrycksTest-high_school_biology|5": "N/A", | |
"harness|hendrycksTest-high_school_chemistry|5": "N/A", | |
"harness|hendrycksTest-high_school_computer_science|5": "N/A", | |
"harness|hendrycksTest-high_school_mathematics|5": "N/A", | |
"harness|hendrycksTest-high_school_physics|5": "N/A", | |
"harness|hendrycksTest-high_school_statistics|5": "N/A", | |
"harness|hendrycksTest-machine_learning|5": "N/A", | |
"harness|hellaswag|10": 1.0, | |
"harness|hellaswag_es|10": 1.0, | |
"harness|hellaswag_de|10": 1.0, | |
"harness|hellaswag_it|10": 1.0, | |
"harness|truthfulqa_mc2_m_fr|0": "Yaml", | |
"harness|truthfulqa_mc2|0": 2.0, | |
"harness|hellaswag_fr|10": 1.0, | |
"harness|belebele_deu_Latn|5": 0.0 | |
} | |
} |
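For reference, a minimal sketch of how a results file with this layout could be summarised locally. The file name, the "config_general"/"results" keys, and the metric names ("acc,none", "acc_norm,none", "exact_match,get-answer") come from the dump above; the script itself is an illustrative assumption, not part of the leaderboard tooling.

```python
import json

# Assumption: the JSON above has been saved locally under the name shown
# in the header line; adjust the path as needed.
PATH = "results_2024_05_31T12-04-14.json"

with open(PATH, encoding="utf-8") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])

# Print one headline score per task. Most tasks here report "acc,none"
# and optionally "acc_norm,none"; gsm8k reports "exact_match,get-answer".
for task, metrics in data["results"].items():
    score = metrics.get(
        "acc_norm,none",
        metrics.get("acc,none", metrics.get("exact_match,get-answer")),
    )
    print(f"{task}: {score:.4f}")
```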