|
{
  "results": {
    "ko_eqbench": {
      "alias": " - ko_eqbench",
      "eqbench,none": 3.9870689327627002,
      "eqbench_stderr,none": 2.4825043880389424,
      "percent_parseable,none": 72.51461988304094,
      "percent_parseable_stderr,none": 3.4240429246915776
    },
    "ko_gpqa_diamond_zeroshot": {
      "alias": " - ko_gpqa_diamond_zeroshot",
      "acc_norm,none": 0.2222222222222222,
      "acc_norm_stderr,none": 0.02962022787479047
    },
    "ko_gsm8k": {
      "alias": " - ko_gsm8k",
      "exact_match,strict-match": 0.10538286580742987,
      "exact_match_stderr,strict-match": 0.008457575884041772,
      "exact_match,flexible-extract": 0.12206216830932524,
      "exact_match_stderr,flexible-extract": 0.009017054965766503
    },
    "ko_ifeval": {
      "alias": " - ko_ifeval",
      "prompt_level_strict_acc,none": 0.1680161943319838,
      "prompt_level_strict_acc_stderr,none": 0.016838738345713967,
      "inst_level_strict_acc,none": 0.23590504451038577,
      "inst_level_strict_acc_stderr,none": "N/A",
      "prompt_level_loose_acc,none": 0.1862348178137652,
      "prompt_level_loose_acc_stderr,none": 0.01753301206479297,
      "inst_level_loose_acc,none": 0.2551928783382789,
      "inst_level_loose_acc_stderr,none": "N/A"
    },
    "ko_winogrande": {
      "alias": " - ko_winogrande",
      "acc,none": 0.5122336227308603,
      "acc_stderr,none": 0.01404827882040562
    },
    "kornat_common": {
      "alias": " - kornat_common",
      "acc_norm,none": 0.15163115845539282,
      "acc_norm_stderr,none": 0.004627620809162965
    },
    "kornat_harmless": {
      "alias": " - kornat_harmless",
      "acc_norm,none": 0.6125333333333481,
      "acc_norm_stderr,none": 0.002096732395845006
    },
    "kornat_helpful": {
      "alias": " - kornat_helpful",
      "acc_norm,none": 0.39825,
      "acc_norm_stderr,none": 0.007038162872391454
    },
    "kornat_social": {
      "alias": " - kornat_social",
      "A-SVA,none": 0.5098129389276078,
      "A-SVA_stderr,none": 0.0037631949237222793
    }
  },
  "versions": {
    "all": 2,
    "ko_eqbench": 2,
    "ko_gpqa_diamond_zeroshot": 2,
    "ko_gsm8k": 2,
    "ko_ifeval": 2,
    "ko_winogrande": 2,
    "kornat_common": 2,
    "kornat_harmless": 2,
    "kornat_helpful": 2,
    "kornat_social": 2
  },
  "config_general": {
    "model_name": "Herry443/Mistral-7B-KNUT-v0.3",
    "model_sha": "089a962c7ef124af537742bd25034c601f264fae",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}