results/Herry443/Mistral-7B-KNUT-ref-ALL/result_2024-02-05 06:09:30.json
{
  "results": {
    "ko_eqbench": {
      "alias": " - ko_eqbench",
      "eqbench,none": -0.16448463461469004,
      "eqbench_stderr,none": 0.3498642948500448,
      "percent_parseable,none": 2.3391812865497075,
      "percent_parseable_stderr,none": 1.1592247905734943
    },
    "ko_gpqa_diamond_zeroshot": {
      "alias": " - ko_gpqa_diamond_zeroshot",
      "acc_norm,none": 0.23232323232323232,
      "acc_norm_stderr,none": 0.030088629490217483
    },
    "ko_gsm8k": {
      "alias": " - ko_gsm8k",
      "exact_match,strict-match": 0.015163002274450341,
      "exact_match_stderr,strict-match": 0.0033660229497263472,
      "exact_match,flexible-extract": 0.06974981046247157,
      "exact_match_stderr,flexible-extract": 0.007016389571013818
    },
    "ko_ifeval": {
      "alias": " - ko_ifeval",
      "prompt_level_strict_acc,none": 0.1882591093117409,
      "prompt_level_strict_acc_stderr,none": 0.017606103558163923,
      "inst_level_strict_acc,none": 0.27002967359050445,
      "inst_level_strict_acc_stderr,none": "N/A",
      "prompt_level_loose_acc,none": 0.19635627530364372,
      "prompt_level_loose_acc_stderr,none": 0.017890839183183842,
      "inst_level_loose_acc,none": 0.2818991097922849,
      "inst_level_loose_acc_stderr,none": "N/A"
    },
    "ko_winogrande": {
      "alias": " - ko_winogrande",
      "acc,none": 0.5374901341752171,
      "acc_stderr,none": 0.014012928183336569
    },
    "kornat_common": {
      "alias": " - kornat_common",
      "acc_norm,none": 0.18924766977363516,
      "acc_norm_stderr,none": 0.005053945532959305
    },
    "kornat_harmless": {
      "alias": " - kornat_harmless",
      "acc_norm,none": 0.609833333333347,
      "acc_norm_stderr,none": 0.0020700733127963613
    },
    "kornat_helpful": {
      "alias": " - kornat_helpful",
      "acc_norm,none": 0.40175,
      "acc_norm_stderr,none": 0.00700074810195272
    },
    "kornat_social": {
      "alias": " - kornat_social",
      "A-SVA,none": 0.5041699081526603,
      "A-SVA_stderr,none": 0.0037951187634595843
    }
  },
  "versions": {
    "all": 2,
    "ko_eqbench": 2,
    "ko_gpqa_diamond_zeroshot": 2,
    "ko_gsm8k": 2,
    "ko_ifeval": 2,
    "ko_winogrande": 2,
    "kornat_common": 2,
    "kornat_harmless": 2,
    "kornat_helpful": 2,
    "kornat_social": 2
  },
  "config_general": {
    "model_name": "Herry443/Mistral-7B-KNUT-ref-ALL",
    "model_sha": "95f28cdf865867be553670e9665149f0ca0f78c9",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
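
For reference, a minimal sketch of reading this results file with Python's standard json module and printing the headline metric values per task. The local path is an assumption (taken from the filename above); the loop skips the "alias" labels and the stderr fields, some of which are the string "N/A" rather than a number (see ko_ifeval):

import json

# Hypothetical local path; adjust to wherever the file was downloaded.
path = "result_2024-02-05 06:09:30.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Walk each task in "results" and print its primary metrics.
for task, metrics in data["results"].items():
    for name, value in metrics.items():
        # Skip the display alias and the standard-error entries.
        if name == "alias" or "_stderr," in name:
            continue
        print(f"{task:28s} {name:40s} {value}")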