{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.44112627986348124,
            "acc_stderr": 0.014509747749064663,
            "acc_norm": 0.5051194539249146,
            "acc_norm_stderr": 0.014610624890309154
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38836885082652856,
            "acc_stderr": 0.0048638313648480805,
            "acc_norm": 0.5151364270065724,
            "acc_norm_stderr": 0.004987494455523721
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5730994152046783,
            "acc_stderr": 0.03793620616529917,
            "acc_norm": 0.5730994152046783,
            "acc_norm_stderr": 0.03793620616529917
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6310679611650486,
            "acc_stderr": 0.0477761518115674,
            "acc_norm": 0.6310679611650486,
            "acc_norm_stderr": 0.0477761518115674
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.45721583652618136,
            "acc_stderr": 0.01781438523853443,
            "acc_norm": 0.45721583652618136,
            "acc_norm_stderr": 0.01781438523853443
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.362962962962963,
            "acc_stderr": 0.04153948404742399,
            "acc_norm": 0.362962962962963,
            "acc_norm_stderr": 0.04153948404742399
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847415,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847415
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4765957446808511,
            "acc_stderr": 0.03265019475033582,
            "acc_norm": 0.4765957446808511,
            "acc_norm_stderr": 0.03265019475033582
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.0374005938202932,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.0374005938202932
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5787781350482315,
            "acc_stderr": 0.02804339985821063,
            "acc_norm": 0.5787781350482315,
            "acc_norm_stderr": 0.02804339985821063
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5067264573991032,
            "acc_stderr": 0.03355476596234354,
            "acc_norm": 0.5067264573991032,
            "acc_norm_stderr": 0.03355476596234354
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4961832061068702,
            "acc_stderr": 0.043851623256015534,
            "acc_norm": 0.4961832061068702,
            "acc_norm_stderr": 0.043851623256015534
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.51010101010101,
            "acc_stderr": 0.035616254886737454,
            "acc_norm": 0.51010101010101,
            "acc_norm_stderr": 0.035616254886737454
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5862068965517241,
            "acc_stderr": 0.041042692118062316,
            "acc_norm": 0.5862068965517241,
            "acc_norm_stderr": 0.041042692118062316
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.048580835742663454,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.048580835742663454
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5714285714285714,
            "acc_stderr": 0.03214536859788639,
            "acc_norm": 0.5714285714285714,
            "acc_norm_stderr": 0.03214536859788639
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.517948717948718,
            "acc_stderr": 0.025334667080954897,
            "acc_norm": 0.517948717948718,
            "acc_norm_stderr": 0.025334667080954897
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6296296296296297,
            "acc_stderr": 0.04668408033024931,
            "acc_norm": 0.6296296296296297,
            "acc_norm_stderr": 0.04668408033024931
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4630541871921182,
            "acc_stderr": 0.035083705204426656,
            "acc_norm": 0.4630541871921182,
            "acc_norm_stderr": 0.035083705204426656
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5580645161290323,
            "acc_stderr": 0.02825155790684974,
            "acc_norm": 0.5580645161290323,
            "acc_norm_stderr": 0.02825155790684974
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7478632478632479,
            "acc_stderr": 0.02844796547623102,
            "acc_norm": 0.7478632478632479,
            "acc_norm_stderr": 0.02844796547623102
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5207547169811321,
            "acc_stderr": 0.030746349975723463,
            "acc_norm": 0.5207547169811321,
            "acc_norm_stderr": 0.030746349975723463
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5363636363636364,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.5363636363636364,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3962962962962963,
            "acc_stderr": 0.029822619458533997,
            "acc_norm": 0.3962962962962963,
            "acc_norm_stderr": 0.029822619458533997
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3841059602649007,
            "acc_stderr": 0.03971301814719198,
            "acc_norm": 0.3841059602649007,
            "acc_norm_stderr": 0.03971301814719198
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6965174129353234,
            "acc_stderr": 0.03251006816458619,
            "acc_norm": 0.6965174129353234,
            "acc_norm_stderr": 0.03251006816458619
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.47398843930635837,
            "acc_stderr": 0.038073017265045105,
            "acc_norm": 0.47398843930635837,
            "acc_norm_stderr": 0.038073017265045105
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.40476190476190477,
            "acc_stderr": 0.025279850397404904,
            "acc_norm": 0.40476190476190477,
            "acc_norm_stderr": 0.025279850397404904
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4652777777777778,
            "acc_stderr": 0.04171115858181618,
            "acc_norm": 0.4652777777777778,
            "acc_norm_stderr": 0.04171115858181618
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.74,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5549132947976878,
            "acc_stderr": 0.02675625512966377,
            "acc_norm": 0.5549132947976878,
            "acc_norm_stderr": 0.02675625512966377
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4723926380368098,
            "acc_stderr": 0.039223782906109894,
            "acc_norm": 0.4723926380368098,
            "acc_norm_stderr": 0.039223782906109894
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.0277012284685426,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.0277012284685426
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5803108808290155,
            "acc_stderr": 0.035615873276858834,
            "acc_norm": 0.5803108808290155,
            "acc_norm_stderr": 0.035615873276858834
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.34210526315789475,
            "acc_stderr": 0.044629175353369376,
            "acc_norm": 0.34210526315789475,
            "acc_norm_stderr": 0.044629175353369376
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6110091743119266,
            "acc_stderr": 0.020902300887392866,
            "acc_norm": 0.6110091743119266,
            "acc_norm_stderr": 0.020902300887392866
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.0442626668137991,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.0442626668137991
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.565359477124183,
            "acc_stderr": 0.028384256704883037,
            "acc_norm": 0.565359477124183,
            "acc_norm_stderr": 0.028384256704883037
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7024793388429752,
            "acc_stderr": 0.04173349148083498,
            "acc_norm": 0.7024793388429752,
            "acc_norm_stderr": 0.04173349148083498
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5460526315789473,
            "acc_stderr": 0.04051646342874143,
            "acc_norm": 0.5460526315789473,
            "acc_norm_stderr": 0.04051646342874143
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.46078431372549017,
            "acc_stderr": 0.02016552331390791,
            "acc_norm": 0.46078431372549017,
            "acc_norm_stderr": 0.02016552331390791
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.35815602836879434,
            "acc_stderr": 0.028602085862759422,
            "acc_norm": 0.35815602836879434,
            "acc_norm_stderr": 0.028602085862759422
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.45535714285714285,
            "acc_stderr": 0.04726835553719099,
            "acc_norm": 0.45535714285714285,
            "acc_norm_stderr": 0.04726835553719099
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.03400603625538272,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.03400603625538272
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2860335195530726,
            "acc_stderr": 0.015113972129062136,
            "acc_norm": 0.2860335195530726,
            "acc_norm_stderr": 0.015113972129062136
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.72,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.72,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.40808823529411764,
            "acc_stderr": 0.029855261393483927,
            "acc_norm": 0.40808823529411764,
            "acc_norm_stderr": 0.029855261393483927
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.636734693877551,
            "acc_stderr": 0.030789051139030806,
            "acc_norm": 0.636734693877551,
            "acc_norm_stderr": 0.030789051139030806
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6666666666666666,
            "acc_stderr": 0.03068582059661082,
            "acc_norm": 0.6666666666666666,
            "acc_norm_stderr": 0.03068582059661082
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.38396349413298564,
            "acc_stderr": 0.01242158783313423,
            "acc_norm": 0.38396349413298564,
            "acc_norm_stderr": 0.01242158783313423
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5784313725490197,
            "acc_stderr": 0.03465868196380762,
            "acc_norm": 0.5784313725490197,
            "acc_norm_stderr": 0.03465868196380762
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6848484848484848,
            "acc_stderr": 0.0362773057502241,
            "acc_norm": 0.6848484848484848,
            "acc_norm_stderr": 0.0362773057502241
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.38310893512851896,
            "mc1_stderr": 0.017018461679389862,
            "mc2": 0.5663893700124538,
            "mc2_stderr": 0.016082183282294993
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5324675324675324,
            "acc_stderr": 0.017154073716682865,
            "acc_norm": 0.5572609208972845,
            "acc_norm_stderr": 0.01707725413155622
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DeepMount00/Llama-3-8b-Ita",
        "model_sha": "c399bd706c749788d260ed5f47c3c5c3190f37d9",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}