results/BAAI/Infinity-Instruct-3M-0625-Llama3-8B/result_2024-07-18 22:34:41.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.40017064846416384,
"acc_stderr": 0.014317197787809186,
"acc_norm": 0.44197952218430037,
"acc_norm_stderr": 0.014512682523128345
},
"harness|ko_hellaswag|10": {
"acc": 0.39075881298546106,
"acc_stderr": 0.004869232758103322,
"acc_norm": 0.5248954391555467,
"acc_norm_stderr": 0.004983592410934173
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6257309941520468,
"acc_stderr": 0.03711601185389481,
"acc_norm": 0.6257309941520468,
"acc_norm_stderr": 0.03711601185389481
},
"harness|ko_mmlu_management|5": {
"acc": 0.6407766990291263,
"acc_stderr": 0.04750458399041696,
"acc_norm": 0.6407766990291263,
"acc_norm_stderr": 0.04750458399041696
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5721583652618135,
"acc_stderr": 0.017692787927803724,
"acc_norm": 0.5721583652618135,
"acc_norm_stderr": 0.017692787927803724
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4222222222222222,
"acc_stderr": 0.042667634040995814,
"acc_norm": 0.4222222222222222,
"acc_norm_stderr": 0.042667634040995814
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4851063829787234,
"acc_stderr": 0.03267151848924777,
"acc_norm": 0.4851063829787234,
"acc_norm_stderr": 0.03267151848924777
},
"harness|ko_mmlu_virology|5": {
"acc": 0.42771084337349397,
"acc_stderr": 0.03851597683718533,
"acc_norm": 0.42771084337349397,
"acc_norm_stderr": 0.03851597683718533
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5369774919614148,
"acc_stderr": 0.02832032583010591,
"acc_norm": 0.5369774919614148,
"acc_norm_stderr": 0.02832032583010591
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4977578475336323,
"acc_stderr": 0.033557465352232634,
"acc_norm": 0.4977578475336323,
"acc_norm_stderr": 0.033557465352232634
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5343511450381679,
"acc_stderr": 0.04374928560599738,
"acc_norm": 0.5343511450381679,
"acc_norm_stderr": 0.04374928560599738
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956914,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956914
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6262626262626263,
"acc_stderr": 0.034468977386593325,
"acc_norm": 0.6262626262626263,
"acc_norm_stderr": 0.034468977386593325
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5379310344827586,
"acc_stderr": 0.04154659671707548,
"acc_norm": 0.5379310344827586,
"acc_norm_stderr": 0.04154659671707548
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3235294117647059,
"acc_stderr": 0.046550104113196177,
"acc_norm": 0.3235294117647059,
"acc_norm_stderr": 0.046550104113196177
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.032422250271150053,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.032422250271150053
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5256410256410257,
"acc_stderr": 0.025317649726448673,
"acc_norm": 0.5256410256410257,
"acc_norm_stderr": 0.025317649726448673
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.04557239513497751,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.04557239513497751
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4236453201970443,
"acc_stderr": 0.034767257476490364,
"acc_norm": 0.4236453201970443,
"acc_norm_stderr": 0.034767257476490364
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5032258064516129,
"acc_stderr": 0.02844341422643831,
"acc_norm": 0.5032258064516129,
"acc_norm_stderr": 0.02844341422643831
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7905982905982906,
"acc_stderr": 0.026655699653922737,
"acc_norm": 0.7905982905982906,
"acc_norm_stderr": 0.026655699653922737
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5094339622641509,
"acc_stderr": 0.0307673947078081,
"acc_norm": 0.5094339622641509,
"acc_norm_stderr": 0.0307673947078081
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5818181818181818,
"acc_stderr": 0.04724577405731572,
"acc_norm": 0.5818181818181818,
"acc_norm_stderr": 0.04724577405731572
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.362962962962963,
"acc_stderr": 0.029318203645206865,
"acc_norm": 0.362962962962963,
"acc_norm_stderr": 0.029318203645206865
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.03333333333333333,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.03333333333333333
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.43352601156069365,
"acc_stderr": 0.037786210790920545,
"acc_norm": 0.43352601156069365,
"acc_norm_stderr": 0.037786210790920545
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3994708994708995,
"acc_stderr": 0.025225450284067877,
"acc_norm": 0.3994708994708995,
"acc_norm_stderr": 0.025225450284067877
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5,
"acc_stderr": 0.04181210050035455,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04181210050035455
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.72,
"acc_stderr": 0.045126085985421255,
"acc_norm": 0.72,
"acc_norm_stderr": 0.045126085985421255
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5751445086705202,
"acc_stderr": 0.026613350840261733,
"acc_norm": 0.5751445086705202,
"acc_norm_stderr": 0.026613350840261733
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.48466257668711654,
"acc_stderr": 0.039265223787088424,
"acc_norm": 0.48466257668711654,
"acc_norm_stderr": 0.039265223787088424
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5524691358024691,
"acc_stderr": 0.0276671385694227,
"acc_norm": 0.5524691358024691,
"acc_norm_stderr": 0.0276671385694227
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5958549222797928,
"acc_stderr": 0.03541508578884021,
"acc_norm": 0.5958549222797928,
"acc_norm_stderr": 0.03541508578884021
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.37719298245614036,
"acc_stderr": 0.04559522141958216,
"acc_norm": 0.37719298245614036,
"acc_norm_stderr": 0.04559522141958216
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5944954128440367,
"acc_stderr": 0.02105099799189684,
"acc_norm": 0.5944954128440367,
"acc_norm_stderr": 0.02105099799189684
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.044518079590553275
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.02858034106513829,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.02858034106513829
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.743801652892562,
"acc_stderr": 0.0398497965330287,
"acc_norm": 0.743801652892562,
"acc_norm_stderr": 0.0398497965330287
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5263157894736842,
"acc_stderr": 0.040633027314866725,
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.040633027314866725
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.46078431372549017,
"acc_stderr": 0.02016552331390791,
"acc_norm": 0.46078431372549017,
"acc_norm_stderr": 0.02016552331390791
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.36879432624113473,
"acc_stderr": 0.028782227561347247,
"acc_norm": 0.36879432624113473,
"acc_norm_stderr": 0.028782227561347247
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.5089285714285714,
"acc_stderr": 0.04745033255489123,
"acc_norm": 0.5089285714285714,
"acc_norm_stderr": 0.04745033255489123
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.03388857118502325,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.03388857118502325
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.264804469273743,
"acc_stderr": 0.01475690648326066,
"acc_norm": 0.264804469273743,
"acc_norm_stderr": 0.01475690648326066
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.39338235294117646,
"acc_stderr": 0.02967428828131118,
"acc_norm": 0.39338235294117646,
"acc_norm_stderr": 0.02967428828131118
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.6040816326530613,
"acc_stderr": 0.03130802899065686,
"acc_norm": 0.6040816326530613,
"acc_norm_stderr": 0.03130802899065686
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6540084388185654,
"acc_stderr": 0.03096481058878671,
"acc_norm": 0.6540084388185654,
"acc_norm_stderr": 0.03096481058878671
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3559322033898305,
"acc_stderr": 0.012228645537277575,
"acc_norm": 0.3559322033898305,
"acc_norm_stderr": 0.012228645537277575
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6078431372549019,
"acc_stderr": 0.03426712349247273,
"acc_norm": 0.6078431372549019,
"acc_norm_stderr": 0.03426712349247273
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.03756335775187896,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.03756335775187896
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.32313341493268055,
"mc1_stderr": 0.016371836286454604,
"mc2": 0.48886729185685895,
"mc2_stderr": 0.01547303599735754
},
"harness|ko_commongen_v2|2": {
"acc": 0.5017709563164109,
"acc_stderr": 0.017190246276231863,
"acc_norm": 0.5572609208972845,
"acc_norm_stderr": 0.01707725413155622
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "BAAI/Infinity-Instruct-3M-0625-Llama3-8B",
"model_sha": "7be7c0ff1e35c3bb781c47222da99a1724f5f1da",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}