{
"results": {
"ko_eqbench": {
"alias": " - ko_eqbench",
"eqbench,none": 0.0,
"eqbench_stderr,none": 0.0,
"percent_parseable,none": 0.0,
"percent_parseable_stderr,none": 0.0
},
"ko_gpqa_diamond_zeroshot": {
"alias": " - ko_gpqa_diamond_zeroshot",
"acc_norm,none": 0.18686868686868688,
"acc_norm_stderr,none": 0.027772533334218977
},
"ko_gsm8k": {
"alias": " - ko_gsm8k",
"exact_match,strict-match": 0.00530705079605762,
"exact_match_stderr,strict-match": 0.002001305720948063,
"exact_match,flexible-extract": 0.04094010614101592,
"exact_match_stderr,flexible-extract": 0.0054580767962943404
},
"ko_ifeval": {
"alias": " - ko_ifeval",
"prompt_level_strict_acc,none": 0.22267206477732793,
"prompt_level_strict_acc_stderr,none": 0.018737495700385506,
"inst_level_strict_acc,none": 0.28338278931750743,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.24696356275303644,
"prompt_level_loose_acc_stderr,none": 0.019422314252520508,
"inst_level_loose_acc,none": 0.30712166172106825,
"inst_level_loose_acc_stderr,none": "N/A"
},
"ko_winogrande": {
"alias": " - ko_winogrande",
"acc,none": 0.5682715074980268,
"acc_stderr,none": 0.01392087211001071
},
"kornat_common": {
"alias": " - kornat_common",
"acc_norm,none": 0.1576231691078562,
"acc_norm_stderr,none": 0.004701478107883536
},
"kornat_harmless": {
"alias": " - kornat_harmless",
"acc_norm,none": 0.6189000000000143,
"acc_norm_stderr,none": 0.002058055258741626
},
"kornat_helpful": {
"alias": " - kornat_helpful",
"acc_norm,none": 0.42925,
"acc_norm_stderr,none": 0.006961246168906313
},
"kornat_social": {
"alias": " - kornat_social",
"A-SVA,none": 0.5099848459656374,
"A-SVA_stderr,none": 0.0037636335981926106
}
},
"versions": {
"all": 2,
"ko_eqbench": 2,
"ko_gpqa_diamond_zeroshot": 2,
"ko_gsm8k": 2,
"ko_ifeval": 2,
"ko_winogrande": 2,
"kornat_common": 2,
"kornat_harmless": 2,
"kornat_helpful": 2,
"kornat_social": 2
},
"config_general": {
"model_name": "NousResearch/Nous-Hermes-llama-2-7b",
"model_sha": "b7c3ec54b754175e006ef75696a2ba3802697078",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}