|
{
    "results": {
        "ko_eqbench": {
            "alias": " - ko_eqbench",
            "eqbench,none": 59.524750790167204,
            "eqbench_stderr,none": 2.600437568709388,
            "percent_parseable,none": 88.88888888888889,
            "percent_parseable_stderr,none": 2.4103384202072893
        },
        "ko_gpqa_diamond_zeroshot": {
            "alias": " - ko_gpqa_diamond_zeroshot",
            "acc_norm,none": 0.25757575757575757,
            "acc_norm_stderr,none": 0.031156269519646836
        },
        "ko_gsm8k": {
            "alias": " - ko_gsm8k",
            "exact_match,strict-match": 0.4655041698256255,
            "exact_match_stderr,strict-match": 0.013739668147545918,
            "exact_match,flexible-extract": 0.6899166034874905,
            "exact_match_stderr,flexible-extract": 0.012740305717376268
        },
        "ko_ifeval": {
            "alias": " - ko_ifeval",
            "prompt_level_strict_acc,none": 0.6032388663967612,
            "prompt_level_strict_acc_stderr,none": 0.02203361512937895,
            "inst_level_strict_acc,none": 0.6795252225519288,
            "inst_level_strict_acc_stderr,none": "N/A",
            "prompt_level_loose_acc,none": 0.6255060728744939,
            "prompt_level_loose_acc_stderr,none": 0.021797901981018192,
            "inst_level_loose_acc,none": 0.6973293768545994,
            "inst_level_loose_acc_stderr,none": "N/A"
        },
        "ko_winogrande": {
            "alias": " - ko_winogrande",
            "acc,none": 0.6511444356748224,
            "acc_stderr,none": 0.013395059320137327
        },
        "kornat_common": {
            "alias": " - kornat_common",
            "acc_norm,none": 0.38049267643142476,
            "acc_norm_stderr,none": 0.0062642280150217765
        },
        "kornat_harmless": {
            "alias": " - kornat_harmless",
            "acc_norm,none": 0.6970000000000347,
            "acc_norm_stderr,none": 0.00212435510727251
        },
        "kornat_helpful": {
            "alias": " - kornat_helpful",
            "acc_norm,none": 0.5265,
            "acc_norm_stderr,none": 0.0073092809354884936
        },
        "kornat_social": {
            "alias": " - kornat_social",
            "A-SVA,none": 0.3594230576066092,
            "A-SVA_stderr,none": 0.0037958311334252413
        }
    },
    "versions": {
        "all": 2,
        "ko_eqbench": 2,
        "ko_gpqa_diamond_zeroshot": 2,
        "ko_gsm8k": 2,
        "ko_ifeval": 2,
        "ko_winogrande": 2,
        "kornat_common": 2,
        "kornat_harmless": 2,
        "kornat_helpful": 2,
        "kornat_social": 2
    },
    "config_general": {
        "model_name": "ENERGY-DRINK-LOVE/rtzr_dpo-v4-hq",
        "model_sha": "0b119f62d1142f290a78fd1755efeeae12232752",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}