{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.757679180887372, "acc_stderr": 0.012521593295800118, "acc_norm": 0.7901023890784983, "acc_norm_stderr": 0.011900548748047449 }, "harness|ko_hellaswag|10": { "acc": 0.7101175064728141, "acc_stderr": 0.004527804016253785, "acc_norm": 0.811790479984067, "acc_norm_stderr": 0.0039008054167367014 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.6783625730994152, "acc_stderr": 0.03582529442573122, "acc_norm": 0.6783625730994152, "acc_norm_stderr": 0.03582529442573122 }, "harness|ko_mmlu_management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.0398913985953177, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.0398913985953177 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6743295019157088, "acc_stderr": 0.016757989458549682, "acc_norm": 0.6743295019157088, "acc_norm_stderr": 0.016757989458549682 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4808510638297872, "acc_stderr": 0.032662042990646775, "acc_norm": 0.4808510638297872, "acc_norm_stderr": 0.032662042990646775 }, "harness|ko_mmlu_virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.038899512528272166, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.038899512528272166 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.6302250803858521, "acc_stderr": 0.027417996705630998, "acc_norm": 0.6302250803858521, "acc_norm_stderr": 0.027417996705630998 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.6457399103139013, "acc_stderr": 0.032100621541349864, "acc_norm": 0.6457399103139013, "acc_norm_stderr": 0.032100621541349864 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7575757575757576, "acc_stderr": 0.030532892233932036, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.030532892233932036 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728763, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728763 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196177, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.03135709599613591, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.03135709599613591 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.6128205128205129, "acc_stderr": 0.02469721693087893, "acc_norm": 0.6128205128205129, "acc_norm_stderr": 0.02469721693087893 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6759259259259259, "acc_stderr": 0.04524596007030048, "acc_norm": 0.6759259259259259, 
"acc_norm_stderr": 0.04524596007030048 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.42857142857142855, "acc_stderr": 0.034819048444388045, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.034819048444388045 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.6, "acc_stderr": 0.027869320571664625, "acc_norm": 0.6, "acc_norm_stderr": 0.027869320571664625 }, "harness|ko_mmlu_marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.02514093595033543, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.02514093595033543 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5886792452830188, "acc_stderr": 0.03028500925900979, "acc_norm": 0.5886792452830188, "acc_norm_stderr": 0.03028500925900979 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473072, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473072 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|ko_mmlu_sociology|5": { "acc": 0.7313432835820896, "acc_stderr": 0.03134328358208954, "acc_norm": 0.7313432835820896, "acc_norm_stderr": 0.03134328358208954 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.5606936416184971, "acc_stderr": 0.03784271932887467, "acc_norm": 0.5606936416184971, "acc_norm_stderr": 0.03784271932887467 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4523809523809524, "acc_stderr": 0.02563425811555496, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.02563425811555496 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.6111111111111112, "acc_stderr": 0.04076663253918567, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.04076663253918567 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.72, "acc_stderr": 0.04512608598542126, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542126 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5809248554913294, "acc_stderr": 0.02656417811142262, "acc_norm": 0.5809248554913294, "acc_norm_stderr": 0.02656417811142262 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.6073619631901841, "acc_stderr": 0.0383674090783103, "acc_norm": 0.6073619631901841, "acc_norm_stderr": 0.0383674090783103 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.6450617283950617, "acc_stderr": 0.026624152478845853, "acc_norm": 0.6450617283950617, "acc_norm_stderr": 0.026624152478845853 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7564766839378239, "acc_stderr": 0.030975436386845426, "acc_norm": 0.7564766839378239, "acc_norm_stderr": 0.030975436386845426 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.046920083813689104, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.046920083813689104 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.7302752293577982, "acc_stderr": 0.01902848671111545, "acc_norm": 0.7302752293577982, "acc_norm_stderr": 0.01902848671111545 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5947712418300654, "acc_stderr": 0.028110928492809075, "acc_norm": 0.5947712418300654, "acc_norm_stderr": 0.028110928492809075 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514512, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514512 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.6381578947368421, "acc_stderr": 0.03910525752849725, "acc_norm": 0.6381578947368421, "acc_norm_stderr": 0.03910525752849725 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5751633986928104, "acc_stderr": 0.019997973035458333, "acc_norm": 0.5751633986928104, "acc_norm_stderr": 0.019997973035458333 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.450354609929078, "acc_stderr": 0.02968010556502904, "acc_norm": 0.450354609929078, "acc_norm_stderr": 0.02968010556502904 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.0340763209385405, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.0340763209385405 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.3486033519553073, "acc_stderr": 0.015937484656687022, "acc_norm": 0.3486033519553073, "acc_norm_stderr": 0.015937484656687022 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.046482319871173156, "acc_norm": 0.69, "acc_norm_stderr": 0.046482319871173156 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5036764705882353, "acc_stderr": 0.030372015885428195, "acc_norm": 0.5036764705882353, "acc_norm_stderr": 0.030372015885428195 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.6571428571428571, "acc_stderr": 0.03038726291954772, "acc_norm": 0.6571428571428571, "acc_norm_stderr": 0.03038726291954772 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.7257383966244726, "acc_stderr": 0.029041333510598035, "acc_norm": 0.7257383966244726, "acc_norm_stderr": 0.029041333510598035 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.44654498044328556, "acc_stderr": 0.012697046024399663, "acc_norm": 0.44654498044328556, "acc_norm_stderr": 0.012697046024399663 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6568627450980392, "acc_stderr": 0.03332139944668086, "acc_norm": 0.6568627450980392, "acc_norm_stderr": 0.03332139944668086 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6424242424242425, "acc_stderr": 0.03742597043806587, "acc_norm": 0.6424242424242425, "acc_norm_stderr": 0.03742597043806587 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.7980416156670747, "mc1_stderr": 0.014053957441512352, "mc2": 0.8601950601950346, "mc2_stderr": 0.011710216294067244 }, "harness|ko_commongen_v2|2": { "acc": 0.5017709563164109, "acc_stderr": 0.017190246276231863, "acc_norm": 0.526564344746163, "acc_norm_stderr": 0.017166075717577747 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "I-BRICKS/Cerebro_BM_solar_v01", "model_sha": "31bbd0564cdb8917c6a7825274bee9245ff8d9c8", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }