results/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json
{
"results": {
"ko_eqbench": {
"alias": " - ko_eqbench",
"eqbench,none": 0.2545542476925691,
"eqbench_stderr,none": 0.19538193980225432,
"percent_parseable,none": 1.1695906432748537,
"percent_parseable_stderr,none": 0.8245894595445987
},
"ko_gpqa_diamond_zeroshot": {
"alias": " - ko_gpqa_diamond_zeroshot",
"acc_norm,none": 0.2222222222222222,
"acc_norm_stderr,none": 0.02962022787479047
},
"ko_gsm8k": {
"alias": " - ko_gsm8k",
"exact_match,strict-match": 0.05534495830174375,
"exact_match_stderr,strict-match": 0.006298221796179569,
"exact_match,flexible-extract": 0.05989385898407885,
"exact_match_stderr,flexible-extract": 0.006536148151288715
},
"ko_ifeval": {
"alias": " - ko_ifeval",
"prompt_level_strict_acc,none": 0.19635627530364372,
"prompt_level_strict_acc_stderr,none": 0.017890839183183842,
"inst_level_strict_acc,none": 0.30712166172106825,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2145748987854251,
"prompt_level_loose_acc_stderr,none": 0.018489211348825075,
"inst_level_loose_acc,none": 0.3264094955489614,
"inst_level_loose_acc_stderr,none": "N/A"
},
"ko_winogrande": {
"alias": " - ko_winogrande",
"acc,none": 0.5469613259668509,
"acc_stderr,none": 0.013990366632148104
},
"kornat_common": {
"alias": " - kornat_common",
"acc_norm,none": 0.12683089214380824,
"acc_norm_stderr,none": 0.004293711782279843
},
"kornat_harmless": {
"alias": " - kornat_harmless",
"acc_norm,none": 0.6041666666666807,
"acc_norm_stderr,none": 0.002059616814935469
},
"kornat_helpful": {
"alias": " - kornat_helpful",
"acc_norm,none": 0.39775,
"acc_norm_stderr,none": 0.0067533526959813375
},
"kornat_social": {
"alias": " - kornat_social",
"A-SVA,none": 0.5096090914148887,
"A-SVA_stderr,none": 0.0037639592910726203
}
},
"versions": {
"all": 2,
"ko_eqbench": 2,
"ko_gpqa_diamond_zeroshot": 2,
"ko_gsm8k": 2,
"ko_ifeval": 2,
"ko_winogrande": 2,
"kornat_common": 2,
"kornat_harmless": 2,
"kornat_helpful": 2,
"kornat_social": 2
},
"config_general": {
"model_name": "heegyu/WizardVicuna2-13b-hf",
"model_sha": "6cfd95e2dcdb6996afa9eb5c63273a1a3524c6c6",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
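
A minimal sketch of reading this result file back, assuming it has been downloaded locally under the same name (the path and the printed layout are illustrative, not part of the leaderboard tooling):

import json

# Load the evaluation result shown above. The local filename is an assumption.
with open("result_2023-10-15 11:17:29.json", encoding="utf-8") as f:
    result = json.load(f)

# Model identifier from the run configuration.
print(result["config_general"]["model_name"])

# Print every metric per task, skipping the display-only "alias" field.
for task, metrics in result["results"].items():
    for key, value in metrics.items():
        if key == "alias":
            continue
        print(f"{task:30s} {key:45s} {value}")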