{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.30204778156996587,
            "acc_stderr": 0.01341751914471642,
            "acc_norm": 0.35580204778157,
            "acc_norm_stderr": 0.01399057113791876
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.34096793467436765,
            "acc_stderr": 0.0047306580730415515,
            "acc_norm": 0.4274048994224258,
            "acc_norm_stderr": 0.004936908503140863
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.03786720706234215,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.03786720706234215
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.47572815533980584,
            "acc_stderr": 0.049449010929737795,
            "acc_norm": 0.47572815533980584,
            "acc_norm_stderr": 0.049449010929737795
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.40485312899106,
            "acc_stderr": 0.017553246467720243,
            "acc_norm": 0.40485312899106,
            "acc_norm_stderr": 0.017553246467720243
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3851851851851852,
            "acc_stderr": 0.042039210401562783,
            "acc_norm": 0.3851851851851852,
            "acc_norm_stderr": 0.042039210401562783
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3276595744680851,
            "acc_stderr": 0.030683020843231004,
            "acc_norm": 0.3276595744680851,
            "acc_norm_stderr": 0.030683020843231004
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.29518072289156627,
            "acc_stderr": 0.035509201856896294,
            "acc_norm": 0.29518072289156627,
            "acc_norm_stderr": 0.035509201856896294
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3954983922829582,
            "acc_stderr": 0.027770918531427834,
            "acc_norm": 0.3954983922829582,
            "acc_norm_stderr": 0.027770918531427834
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4260089686098655,
            "acc_stderr": 0.03318833286217281,
            "acc_norm": 0.4260089686098655,
            "acc_norm_stderr": 0.03318833286217281
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3053435114503817,
            "acc_stderr": 0.040393149787245605,
            "acc_norm": 0.3053435114503817,
            "acc_norm_stderr": 0.040393149787245605
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.41414141414141414,
            "acc_stderr": 0.03509438348879629,
            "acc_norm": 0.41414141414141414,
            "acc_norm_stderr": 0.03509438348879629
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3310344827586207,
            "acc_stderr": 0.03921545312467122,
            "acc_norm": 0.3310344827586207,
            "acc_norm_stderr": 0.03921545312467122
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.042207736591714534,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.042207736591714534
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.032145368597886394,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.032145368597886394
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.33589743589743587,
            "acc_stderr": 0.023946724741563976,
            "acc_norm": 0.33589743589743587,
            "acc_norm_stderr": 0.023946724741563976
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.04820403072760627,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.04820403072760627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3054187192118227,
            "acc_stderr": 0.03240661565868408,
            "acc_norm": 0.3054187192118227,
            "acc_norm_stderr": 0.03240661565868408
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.38387096774193546,
            "acc_stderr": 0.02766618207553965,
            "acc_norm": 0.38387096774193546,
            "acc_norm_stderr": 0.02766618207553965
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5811965811965812,
            "acc_stderr": 0.03232128912157792,
            "acc_norm": 0.5811965811965812,
            "acc_norm_stderr": 0.03232128912157792
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3622641509433962,
            "acc_stderr": 0.0295822451283843,
            "acc_norm": 0.3622641509433962,
            "acc_norm_stderr": 0.0295822451283843
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.42727272727272725,
            "acc_stderr": 0.047381987035454834,
            "acc_norm": 0.42727272727272725,
            "acc_norm_stderr": 0.047381987035454834
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.027940457136228402,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.027940457136228402
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.43781094527363185,
            "acc_stderr": 0.035080801121998406,
            "acc_norm": 0.43781094527363185,
            "acc_norm_stderr": 0.035080801121998406
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.28901734104046245,
            "acc_stderr": 0.034564257450869995,
            "acc_norm": 0.28901734104046245,
            "acc_norm_stderr": 0.034564257450869995
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.023517294335963286,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.023517294335963286
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.037161774375660164,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.037161774375660164
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.41040462427745666,
            "acc_stderr": 0.026483392042098177,
            "acc_norm": 0.41040462427745666,
            "acc_norm_stderr": 0.026483392042098177
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4233128834355828,
            "acc_stderr": 0.03881891213334384,
            "acc_norm": 0.4233128834355828,
            "acc_norm_stderr": 0.03881891213334384
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.026869490744815254,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.026869490744815254
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.38341968911917096,
            "acc_stderr": 0.03508984236295342,
            "acc_norm": 0.38341968911917096,
            "acc_norm_stderr": 0.03508984236295342
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.0409698513984367,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.0409698513984367
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3211009174311927,
            "acc_stderr": 0.020018149772733747,
            "acc_norm": 0.3211009174311927,
            "acc_norm_stderr": 0.020018149772733747
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3412698412698413,
            "acc_stderr": 0.042407993275749255,
            "acc_norm": 0.3412698412698413,
            "acc_norm_stderr": 0.042407993275749255
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.434640522875817,
            "acc_stderr": 0.028384256704883037,
            "acc_norm": 0.434640522875817,
            "acc_norm_stderr": 0.028384256704883037
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5785123966942148,
            "acc_stderr": 0.04507732278775088,
            "acc_norm": 0.5785123966942148,
            "acc_norm_stderr": 0.04507732278775088
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3092105263157895,
            "acc_stderr": 0.03761070869867479,
            "acc_norm": 0.3092105263157895,
            "acc_norm_stderr": 0.03761070869867479
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3202614379084967,
            "acc_stderr": 0.01887568293806944,
            "acc_norm": 0.3202614379084967,
            "acc_norm_stderr": 0.01887568293806944
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32978723404255317,
            "acc_stderr": 0.0280459469420424,
            "acc_norm": 0.32978723404255317,
            "acc_norm_stderr": 0.0280459469420424
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.04464285714285714,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.04464285714285714
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3287037037037037,
            "acc_stderr": 0.03203614084670058,
            "acc_norm": 0.3287037037037037,
            "acc_norm_stderr": 0.03203614084670058
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.014333522059217892,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.014333522059217892
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2977941176470588,
            "acc_stderr": 0.027778298701545443,
            "acc_norm": 0.2977941176470588,
            "acc_norm_stderr": 0.027778298701545443
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.45714285714285713,
            "acc_stderr": 0.031891418324213966,
            "acc_norm": 0.45714285714285713,
            "acc_norm_stderr": 0.031891418324213966
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4219409282700422,
            "acc_stderr": 0.03214814630240369,
            "acc_norm": 0.4219409282700422,
            "acc_norm_stderr": 0.03214814630240369
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31486310299869624,
            "acc_stderr": 0.011862561755715937,
            "acc_norm": 0.31486310299869624,
            "acc_norm_stderr": 0.011862561755715937
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.36764705882352944,
            "acc_stderr": 0.03384132045674118,
            "acc_norm": 0.36764705882352944,
            "acc_norm_stderr": 0.03384132045674118
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.41818181818181815,
            "acc_stderr": 0.03851716319398395,
            "acc_norm": 0.41818181818181815,
            "acc_norm_stderr": 0.03851716319398395
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2692778457772338,
            "mc1_stderr": 0.015528566637087307,
            "mc2": 0.4379048810658281,
            "mc2_stderr": 0.01538143830058003
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3140495867768595,
            "acc_stderr": 0.015957332434295066,
            "acc_norm": 0.4085005903187721,
            "acc_norm_stderr": 0.01690006287942711
        }
    },
"versions": { |
|
"all": 0, |
|
"harness|ko_arc_challenge|25": 0, |
|
"harness|ko_hellaswag|10": 0, |
|
"harness|ko_mmlu_world_religions|5": 1, |
|
"harness|ko_mmlu_management|5": 1, |
|
"harness|ko_mmlu_miscellaneous|5": 1, |
|
"harness|ko_mmlu_anatomy|5": 1, |
|
"harness|ko_mmlu_abstract_algebra|5": 1, |
|
"harness|ko_mmlu_conceptual_physics|5": 1, |
|
"harness|ko_mmlu_virology|5": 1, |
|
"harness|ko_mmlu_philosophy|5": 1, |
|
"harness|ko_mmlu_human_aging|5": 1, |
|
"harness|ko_mmlu_human_sexuality|5": 1, |
|
"harness|ko_mmlu_medical_genetics|5": 1, |
|
"harness|ko_mmlu_high_school_geography|5": 1, |
|
"harness|ko_mmlu_electrical_engineering|5": 1, |
|
"harness|ko_mmlu_college_physics|5": 1, |
|
"harness|ko_mmlu_high_school_microeconomics|5": 1, |
|
"harness|ko_mmlu_high_school_macroeconomics|5": 1, |
|
"harness|ko_mmlu_computer_security|5": 1, |
|
"harness|ko_mmlu_global_facts|5": 1, |
|
"harness|ko_mmlu_jurisprudence|5": 1, |
|
"harness|ko_mmlu_high_school_chemistry|5": 1, |
|
"harness|ko_mmlu_high_school_biology|5": 1, |
|
"harness|ko_mmlu_marketing|5": 1, |
|
"harness|ko_mmlu_clinical_knowledge|5": 1, |
|
"harness|ko_mmlu_public_relations|5": 1, |
|
"harness|ko_mmlu_high_school_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_physics|5": 1, |
|
"harness|ko_mmlu_sociology|5": 1, |
|
"harness|ko_mmlu_college_medicine|5": 1, |
|
"harness|ko_mmlu_elementary_mathematics|5": 1, |
|
"harness|ko_mmlu_college_biology|5": 1, |
|
"harness|ko_mmlu_college_chemistry|5": 1, |
|
"harness|ko_mmlu_us_foreign_policy|5": 1, |
|
"harness|ko_mmlu_moral_disputes|5": 1, |
|
"harness|ko_mmlu_logical_fallacies|5": 1, |
|
"harness|ko_mmlu_prehistory|5": 1, |
|
"harness|ko_mmlu_college_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_government_and_politics|5": 1, |
|
"harness|ko_mmlu_econometrics|5": 1, |
|
"harness|ko_mmlu_high_school_psychology|5": 1, |
|
"harness|ko_mmlu_formal_logic|5": 1, |
|
"harness|ko_mmlu_nutrition|5": 1, |
|
"harness|ko_mmlu_business_ethics|5": 1, |
|
"harness|ko_mmlu_international_law|5": 1, |
|
"harness|ko_mmlu_astronomy|5": 1, |
|
"harness|ko_mmlu_professional_psychology|5": 1, |
|
"harness|ko_mmlu_professional_accounting|5": 1, |
|
"harness|ko_mmlu_machine_learning|5": 1, |
|
"harness|ko_mmlu_high_school_statistics|5": 1, |
|
"harness|ko_mmlu_moral_scenarios|5": 1, |
|
"harness|ko_mmlu_college_computer_science|5": 1, |
|
"harness|ko_mmlu_high_school_computer_science|5": 1, |
|
"harness|ko_mmlu_professional_medicine|5": 1, |
|
"harness|ko_mmlu_security_studies|5": 1, |
|
"harness|ko_mmlu_high_school_world_history|5": 1, |
|
"harness|ko_mmlu_professional_law|5": 1, |
|
"harness|ko_mmlu_high_school_us_history|5": 1, |
|
"harness|ko_mmlu_high_school_european_history|5": 1, |
|
"harness|ko_truthfulqa_mc|0": 0, |
|
"harness|ko_commongen_v2|2": 1 |
|
}, |
|
"config_general": { |
|
"model_name": "Changgil/K2S3-Mistral-7b-v1.1", |
|
"model_sha": "0f7e1ed84843f50791fa74315dfa0f975f300344", |
|
"model_dtype": "torch.float16", |
|
"lighteval_sha": "", |
|
"num_few_shot_default": 0, |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 1, |
|
"max_samples": null |
|
} |
|
} |