results/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_DPOv3/result_2024-03-08 00:52:09.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4513651877133106,
            "acc_stderr": 0.014542104569955265,
            "acc_norm": 0.5085324232081911,
            "acc_norm_stderr": 0.014609263165632175
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.46335391356303524,
            "acc_stderr": 0.004976361454341332,
            "acc_norm": 0.6296554471220872,
            "acc_norm_stderr": 0.0048191004568678125
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6549707602339181,
            "acc_stderr": 0.036459813773888065,
            "acc_norm": 0.6549707602339181,
            "acc_norm_stderr": 0.036459813773888065
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6213592233009708,
            "acc_stderr": 0.04802694698258975,
            "acc_norm": 0.6213592233009708,
            "acc_norm_stderr": 0.04802694698258975
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.685823754789272,
            "acc_stderr": 0.01659929173588492,
            "acc_norm": 0.685823754789272,
            "acc_norm_stderr": 0.01659929173588492
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.0424463323835323,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.0424463323835323
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720683,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720683
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4340425531914894,
            "acc_stderr": 0.032400380867927465,
            "acc_norm": 0.4340425531914894,
            "acc_norm_stderr": 0.032400380867927465
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4819277108433735,
            "acc_stderr": 0.03889951252827216,
            "acc_norm": 0.4819277108433735,
            "acc_norm_stderr": 0.03889951252827216
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5787781350482315,
            "acc_stderr": 0.028043399858210628,
            "acc_norm": 0.5787781350482315,
            "acc_norm_stderr": 0.028043399858210628
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5650224215246636,
            "acc_stderr": 0.03327283370271344,
            "acc_norm": 0.5650224215246636,
            "acc_norm_stderr": 0.03327283370271344
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5419847328244275,
            "acc_stderr": 0.04369802690578756,
            "acc_norm": 0.5419847328244275,
            "acc_norm_stderr": 0.04369802690578756
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7373737373737373,
            "acc_stderr": 0.03135305009533087,
            "acc_norm": 0.7373737373737373,
            "acc_norm_stderr": 0.03135305009533087
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4413793103448276,
            "acc_stderr": 0.04137931034482757,
            "acc_norm": 0.4413793103448276,
            "acc_norm_stderr": 0.04137931034482757
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171452,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171452
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5546218487394958,
            "acc_stderr": 0.03228410626716391,
            "acc_norm": 0.5546218487394958,
            "acc_norm_stderr": 0.03228410626716391
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5461538461538461,
            "acc_stderr": 0.025242770987126194,
            "acc_norm": 0.5461538461538461,
            "acc_norm_stderr": 0.025242770987126194
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.55,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6203703703703703,
            "acc_stderr": 0.04691521224077742,
            "acc_norm": 0.6203703703703703,
            "acc_norm_stderr": 0.04691521224077742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4236453201970443,
            "acc_stderr": 0.034767257476490364,
            "acc_norm": 0.4236453201970443,
            "acc_norm_stderr": 0.034767257476490364
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5935483870967742,
            "acc_stderr": 0.027941727346256308,
            "acc_norm": 0.5935483870967742,
            "acc_norm_stderr": 0.027941727346256308
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7777777777777778,
            "acc_stderr": 0.027236013946196673,
            "acc_norm": 0.7777777777777778,
            "acc_norm_stderr": 0.027236013946196673
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5169811320754717,
            "acc_stderr": 0.030755120364119898,
            "acc_norm": 0.5169811320754717,
            "acc_norm_stderr": 0.030755120364119898
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972744,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972744
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34444444444444444,
            "acc_stderr": 0.02897264888484427,
            "acc_norm": 0.34444444444444444,
            "acc_norm_stderr": 0.02897264888484427
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.36423841059602646,
            "acc_stderr": 0.03929111781242742,
            "acc_norm": 0.36423841059602646,
            "acc_norm_stderr": 0.03929111781242742
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6965174129353234,
            "acc_stderr": 0.03251006816458618,
            "acc_norm": 0.6965174129353234,
            "acc_norm_stderr": 0.03251006816458618
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4797687861271676,
            "acc_stderr": 0.03809342081273957,
            "acc_norm": 0.4797687861271676,
            "acc_norm_stderr": 0.03809342081273957
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.373015873015873,
            "acc_stderr": 0.02490699045899257,
            "acc_norm": 0.373015873015873,
            "acc_norm_stderr": 0.02490699045899257
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5,
            "acc_stderr": 0.04181210050035455,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04181210050035455
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.77,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.77,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5751445086705202,
            "acc_stderr": 0.026613350840261733,
            "acc_norm": 0.5751445086705202,
            "acc_norm_stderr": 0.026613350840261733
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49079754601226994,
            "acc_stderr": 0.039277056007874414,
            "acc_norm": 0.49079754601226994,
            "acc_norm_stderr": 0.039277056007874414
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6049382716049383,
            "acc_stderr": 0.02720111766692565,
            "acc_norm": 0.6049382716049383,
            "acc_norm_stderr": 0.02720111766692565
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6787564766839378,
            "acc_stderr": 0.033699508685490674,
            "acc_norm": 0.6787564766839378,
            "acc_norm_stderr": 0.033699508685490674
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.046446020912223177,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.046446020912223177
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6330275229357798,
            "acc_stderr": 0.020664675659520536,
            "acc_norm": 0.6330275229357798,
            "acc_norm_stderr": 0.020664675659520536
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.04190596438871136,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.04190596438871136
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6045751633986928,
            "acc_stderr": 0.027996723180631452,
            "acc_norm": 0.6045751633986928,
            "acc_norm_stderr": 0.027996723180631452
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7603305785123967,
            "acc_stderr": 0.03896878985070417,
            "acc_norm": 0.7603305785123967,
            "acc_norm_stderr": 0.03896878985070417
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5723684210526315,
            "acc_stderr": 0.04026097083296563,
            "acc_norm": 0.5723684210526315,
            "acc_norm_stderr": 0.04026097083296563
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4738562091503268,
            "acc_stderr": 0.020200164564804588,
            "acc_norm": 0.4738562091503268,
            "acc_norm_stderr": 0.020200164564804588
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.36879432624113473,
            "acc_stderr": 0.02878222756134724,
            "acc_norm": 0.36879432624113473,
            "acc_norm_stderr": 0.02878222756134724
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3482142857142857,
            "acc_stderr": 0.04521829902833587,
            "acc_norm": 0.3482142857142857,
            "acc_norm_stderr": 0.04521829902833587
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.03400603625538271,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.03400603625538271
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2,
            "acc_stderr": 0.013378001241813072,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.013378001241813072
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.68,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.68,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5036764705882353,
            "acc_stderr": 0.0303720158854282,
            "acc_norm": 0.5036764705882353,
            "acc_norm_stderr": 0.0303720158854282
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6,
            "acc_stderr": 0.031362502409358936,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.031362502409358936
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.70042194092827,
            "acc_stderr": 0.02981802474975309,
            "acc_norm": 0.70042194092827,
            "acc_norm_stderr": 0.02981802474975309
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.378748370273794,
            "acc_stderr": 0.012389052105003743,
            "acc_norm": 0.378748370273794,
            "acc_norm_stderr": 0.012389052105003743
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6617647058823529,
            "acc_stderr": 0.03320574612945431,
            "acc_norm": 0.6617647058823529,
            "acc_norm_stderr": 0.03320574612945431
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6666666666666666,
            "acc_stderr": 0.036810508691615486,
            "acc_norm": 0.6666666666666666,
            "acc_norm_stderr": 0.036810508691615486
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29008567931456547,
            "mc1_stderr": 0.01588623687420952,
            "mc2": 0.4275177008853309,
            "mc2_stderr": 0.015353974714185103
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3990554899645809,
            "acc_stderr": 0.0168363772928493,
            "acc_norm": 0.43919716646989376,
            "acc_norm_stderr": 0.0170627757447807
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_DPOv3",
        "model_sha": "d4112540ee507d0a4fc61d60c954600c82984058",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
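A minimal sketch (not part of the original result file) of how a result JSON like the one above might be loaded and summarized with standard-library Python. The local filename and the unweighted averaging rule are assumptions for illustration, not the leaderboard's official aggregation.

import json
from statistics import mean

# Hypothetical local copy of the result file shown above.
PATH = "result_2024-03-08 00:52:09.json"

with open(PATH, encoding="utf-8") as f:
    data = json.load(f)

# Take acc_norm per task where available, falling back to mc2 for ko_truthfulqa_mc.
scores = {
    task: metrics.get("acc_norm", metrics.get("mc2"))
    for task, metrics in data["results"].items()
}

# Unweighted mean across tasks -- an illustrative summary only.
print(f"tasks evaluated: {len(scores)}")
print(f"mean score     : {mean(scores.values()):.4f}")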