|
{ |
|
"config_general": { |
|
"lighteval_sha": "?", |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 8, |
|
"max_samples": null, |
|
"job_id": "", |
|
"start_time": 1379672.653174776, |
|
"end_time": 1380902.32375736, |
|
"total_evaluation_time_secondes": 1229.6705825841054,
|
"model_name": "HuggingFaceTB/SmolLM-360M-2T-decay", |
|
"model_sha": "0ae3a7ab88864d2f8ad177dffba413ff005dacde", |
|
"model_dtype": "torch.float32", |
|
"model_size": "1.35 GB", |
|
"config": null |
|
}, |
|
"results": { |
|
"custom|arc:challenge|0": { |
|
"acc": 0.22184300341296928, |
|
"acc_stderr": 0.012141659068147886, |
|
"acc_norm": 0.24914675767918087, |
|
"acc_norm_stderr": 0.012639407111926433 |
|
}, |
|
"custom|arc:easy|0": { |
|
"acc": 0.5774410774410774, |
|
"acc_stderr": 0.010135978222981082, |
|
"acc_norm": 0.5244107744107744, |
|
"acc_norm_stderr": 0.010247548905242274 |
|
}, |
|
"custom|commonsense_qa|0": { |
|
"acc": 0.3063063063063063, |
|
"acc_stderr": 0.013197203908506533, |
|
"acc_norm": 0.3202293202293202, |
|
"acc_norm_stderr": 0.01335770492627267 |
|
}, |
|
"custom|hellaswag|0": { |
|
"acc": 0.2746464847639912, |
|
"acc_stderr": 0.004454237797448343, |
|
"acc_norm": 0.27912766381198967, |
|
"acc_norm_stderr": 0.004476536569056587 |
|
}, |
|
"custom|mmlu_cloze:abstract_algebra|0": { |
|
"acc": 0.19, |
|
"acc_stderr": 0.03942772444036625, |
|
"acc_norm": 0.17, |
|
"acc_norm_stderr": 0.03775251680686371 |
|
}, |
|
"custom|mmlu_cloze:anatomy|0": { |
|
"acc": 0.3037037037037037, |
|
"acc_stderr": 0.03972552884785136, |
|
"acc_norm": 0.3333333333333333, |
|
"acc_norm_stderr": 0.04072314811876837 |
|
}, |
|
"custom|mmlu_cloze:astronomy|0": { |
|
"acc": 0.23026315789473684, |
|
"acc_stderr": 0.03426059424403165, |
|
"acc_norm": 0.3026315789473684, |
|
"acc_norm_stderr": 0.03738520676119667 |
|
}, |
|
"custom|mmlu_cloze:business_ethics|0": { |
|
"acc": 0.45, |
|
"acc_stderr": 0.05, |
|
"acc_norm": 0.33, |
|
"acc_norm_stderr": 0.04725815626252604 |
|
}, |
|
"custom|mmlu_cloze:clinical_knowledge|0": { |
|
"acc": 0.2188679245283019, |
|
"acc_stderr": 0.02544786382510863, |
|
"acc_norm": 0.30566037735849055, |
|
"acc_norm_stderr": 0.028353298073322663 |
|
}, |
|
"custom|mmlu_cloze:college_biology|0": { |
|
"acc": 0.3263888888888889, |
|
"acc_stderr": 0.03921067198982266, |
|
"acc_norm": 0.2847222222222222, |
|
"acc_norm_stderr": 0.03773809990686935 |
|
}, |
|
"custom|mmlu_cloze:college_chemistry|0": { |
|
"acc": 0.26, |
|
"acc_stderr": 0.044084400227680794, |
|
"acc_norm": 0.25, |
|
"acc_norm_stderr": 0.04351941398892446 |
|
}, |
|
"custom|mmlu_cloze:college_computer_science|0": { |
|
"acc": 0.24, |
|
"acc_stderr": 0.04292346959909283, |
|
"acc_norm": 0.21, |
|
"acc_norm_stderr": 0.040936018074033256 |
|
}, |
|
"custom|mmlu_cloze:college_mathematics|0": { |
|
"acc": 0.17, |
|
"acc_stderr": 0.0377525168068637, |
|
"acc_norm": 0.19, |
|
"acc_norm_stderr": 0.03942772444036622 |
|
}, |
|
"custom|mmlu_cloze:college_medicine|0": { |
|
"acc": 0.24855491329479767, |
|
"acc_stderr": 0.03295304696818318, |
|
"acc_norm": 0.2774566473988439, |
|
"acc_norm_stderr": 0.034140140070440354 |
|
}, |
|
"custom|mmlu_cloze:college_physics|0": { |
|
"acc": 0.21568627450980393, |
|
"acc_stderr": 0.04092563958237656, |
|
"acc_norm": 0.21568627450980393, |
|
"acc_norm_stderr": 0.04092563958237656 |
|
}, |
|
"custom|mmlu_cloze:computer_security|0": { |
|
"acc": 0.25, |
|
"acc_stderr": 0.04351941398892446, |
|
"acc_norm": 0.27, |
|
"acc_norm_stderr": 0.044619604333847394 |
|
}, |
|
"custom|mmlu_cloze:conceptual_physics|0": { |
|
"acc": 0.30638297872340425, |
|
"acc_stderr": 0.03013590647851756, |
|
"acc_norm": 0.2765957446808511, |
|
"acc_norm_stderr": 0.029241883869628817 |
|
}, |
|
"custom|mmlu_cloze:econometrics|0": { |
|
"acc": 0.20175438596491227, |
|
"acc_stderr": 0.037752050135836386, |
|
"acc_norm": 0.22807017543859648, |
|
"acc_norm_stderr": 0.03947152782669415 |
|
}, |
|
"custom|mmlu_cloze:electrical_engineering|0": { |
|
"acc": 0.22758620689655173, |
|
"acc_stderr": 0.03493950380131184, |
|
"acc_norm": 0.27586206896551724, |
|
"acc_norm_stderr": 0.03724563619774632 |
|
}, |
|
"custom|mmlu_cloze:elementary_mathematics|0": { |
|
"acc": 0.2328042328042328, |
|
"acc_stderr": 0.021765961672154537, |
|
"acc_norm": 0.24074074074074073, |
|
"acc_norm_stderr": 0.0220190800122179 |
|
}, |
|
"custom|mmlu_cloze:formal_logic|0": { |
|
"acc": 0.2777777777777778, |
|
"acc_stderr": 0.04006168083848876, |
|
"acc_norm": 0.2698412698412698, |
|
"acc_norm_stderr": 0.03970158273235171 |
|
}, |
|
"custom|mmlu_cloze:global_facts|0": { |
|
"acc": 0.25, |
|
"acc_stderr": 0.04351941398892446, |
|
"acc_norm": 0.3, |
|
"acc_norm_stderr": 0.046056618647183814 |
|
}, |
|
"custom|mmlu_cloze:high_school_biology|0": { |
|
"acc": 0.24193548387096775, |
|
"acc_stderr": 0.024362599693031103, |
|
"acc_norm": 0.25806451612903225, |
|
"acc_norm_stderr": 0.024892469172462833 |
|
}, |
|
"custom|mmlu_cloze:high_school_chemistry|0": { |
|
"acc": 0.18719211822660098, |
|
"acc_stderr": 0.027444924966882618, |
|
"acc_norm": 0.20689655172413793, |
|
"acc_norm_stderr": 0.028501378167893946 |
|
}, |
|
"custom|mmlu_cloze:high_school_computer_science|0": { |
|
"acc": 0.24, |
|
"acc_stderr": 0.04292346959909283, |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.04560480215720684 |
|
}, |
|
"custom|mmlu_cloze:high_school_european_history|0": { |
|
"acc": 0.18787878787878787, |
|
"acc_stderr": 0.030501934059429144, |
|
"acc_norm": 0.24848484848484848, |
|
"acc_norm_stderr": 0.03374402644139404 |
|
}, |
|
"custom|mmlu_cloze:high_school_geography|0": { |
|
"acc": 0.25252525252525254, |
|
"acc_stderr": 0.030954055470365907, |
|
"acc_norm": 0.30808080808080807, |
|
"acc_norm_stderr": 0.03289477330098614 |
|
}, |
|
"custom|mmlu_cloze:high_school_government_and_politics|0": { |
|
"acc": 0.2694300518134715, |
|
"acc_stderr": 0.03201867122877794, |
|
"acc_norm": 0.34196891191709844, |
|
"acc_norm_stderr": 0.03423465100104284 |
|
}, |
|
"custom|mmlu_cloze:high_school_macroeconomics|0": { |
|
"acc": 0.2641025641025641, |
|
"acc_stderr": 0.022352193737453282, |
|
"acc_norm": 0.26153846153846155, |
|
"acc_norm_stderr": 0.022282141204204426 |
|
}, |
|
"custom|mmlu_cloze:high_school_mathematics|0": { |
|
"acc": 0.15185185185185185, |
|
"acc_stderr": 0.021881130957380465, |
|
"acc_norm": 0.1814814814814815, |
|
"acc_norm_stderr": 0.02349926466940728 |
|
}, |
|
"custom|mmlu_cloze:high_school_microeconomics|0": { |
|
"acc": 0.18067226890756302, |
|
"acc_stderr": 0.02499196496660075, |
|
"acc_norm": 0.33613445378151263, |
|
"acc_norm_stderr": 0.030684737115135356 |
|
}, |
|
"custom|mmlu_cloze:high_school_physics|0": { |
|
"acc": 0.2251655629139073, |
|
"acc_stderr": 0.03410435282008936, |
|
"acc_norm": 0.23841059602649006, |
|
"acc_norm_stderr": 0.0347918557259966 |
|
}, |
|
"custom|mmlu_cloze:high_school_psychology|0": { |
|
"acc": 0.29908256880733947, |
|
"acc_stderr": 0.019630417285415175, |
|
"acc_norm": 0.29908256880733947, |
|
"acc_norm_stderr": 0.019630417285415168 |
|
}, |
|
"custom|mmlu_cloze:high_school_statistics|0": { |
|
"acc": 0.25, |
|
"acc_stderr": 0.029531221160930918, |
|
"acc_norm": 0.2916666666666667, |
|
"acc_norm_stderr": 0.03099866630456053 |
|
}, |
|
"custom|mmlu_cloze:high_school_us_history|0": { |
|
"acc": 0.20098039215686275, |
|
"acc_stderr": 0.02812597226565437, |
|
"acc_norm": 0.27941176470588236, |
|
"acc_norm_stderr": 0.03149328104507957 |
|
}, |
|
"custom|mmlu_cloze:high_school_world_history|0": { |
|
"acc": 0.189873417721519, |
|
"acc_stderr": 0.025530100460233494, |
|
"acc_norm": 0.2742616033755274, |
|
"acc_norm_stderr": 0.029041333510598046 |
|
}, |
|
"custom|mmlu_cloze:human_aging|0": { |
|
"acc": 0.34080717488789236, |
|
"acc_stderr": 0.0318114974705536, |
|
"acc_norm": 0.2914798206278027, |
|
"acc_norm_stderr": 0.03050028317654591 |
|
}, |
|
"custom|mmlu_cloze:human_sexuality|0": { |
|
"acc": 0.33587786259541985, |
|
"acc_stderr": 0.041423137719966634, |
|
"acc_norm": 0.2824427480916031, |
|
"acc_norm_stderr": 0.03948406125768361 |
|
}, |
|
"custom|mmlu_cloze:international_law|0": { |
|
"acc": 0.12396694214876033, |
|
"acc_stderr": 0.030083098716035227, |
|
"acc_norm": 0.21487603305785125, |
|
"acc_norm_stderr": 0.03749492448709698 |
|
}, |
|
"custom|mmlu_cloze:jurisprudence|0": { |
|
"acc": 0.19444444444444445, |
|
"acc_stderr": 0.038260763248848646, |
|
"acc_norm": 0.26851851851851855, |
|
"acc_norm_stderr": 0.04284467968052191 |
|
}, |
|
"custom|mmlu_cloze:logical_fallacies|0": { |
|
"acc": 0.2331288343558282, |
|
"acc_stderr": 0.03322015795776741, |
|
"acc_norm": 0.2822085889570552, |
|
"acc_norm_stderr": 0.03536117886664743 |
|
}, |
|
"custom|mmlu_cloze:machine_learning|0": { |
|
"acc": 0.33035714285714285, |
|
"acc_stderr": 0.04464285714285714, |
|
"acc_norm": 0.2767857142857143, |
|
"acc_norm_stderr": 0.042466243366976256 |
|
}, |
|
"custom|mmlu_cloze:management|0": { |
|
"acc": 0.2524271844660194, |
|
"acc_stderr": 0.04301250399690879, |
|
"acc_norm": 0.3106796116504854, |
|
"acc_norm_stderr": 0.0458212416016155 |
|
}, |
|
"custom|mmlu_cloze:marketing|0": { |
|
"acc": 0.3504273504273504, |
|
"acc_stderr": 0.031256108244218796, |
|
"acc_norm": 0.32905982905982906, |
|
"acc_norm_stderr": 0.030782321577688163 |
|
}, |
|
"custom|mmlu_cloze:medical_genetics|0": { |
|
"acc": 0.27, |
|
"acc_stderr": 0.044619604333847394, |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.045604802157206845 |
|
}, |
|
"custom|mmlu_cloze:miscellaneous|0": { |
|
"acc": 0.31545338441890164, |
|
"acc_stderr": 0.016617501738763394, |
|
"acc_norm": 0.31928480204342274, |
|
"acc_norm_stderr": 0.016671261749538736 |
|
}, |
|
"custom|mmlu_cloze:moral_disputes|0": { |
|
"acc": 0.2514450867052023, |
|
"acc_stderr": 0.023357365785874037, |
|
"acc_norm": 0.21098265895953758, |
|
"acc_norm_stderr": 0.021966309947043117 |
|
}, |
|
"custom|mmlu_cloze:moral_scenarios|0": { |
|
"acc": 0.23798882681564246, |
|
"acc_stderr": 0.014242630070574915, |
|
"acc_norm": 0.2759776536312849, |
|
"acc_norm_stderr": 0.014950103002475353 |
|
}, |
|
"custom|mmlu_cloze:nutrition|0": { |
|
"acc": 0.21895424836601307, |
|
"acc_stderr": 0.02367908986180772, |
|
"acc_norm": 0.28431372549019607, |
|
"acc_norm_stderr": 0.02582916327275748 |
|
}, |
|
"custom|mmlu_cloze:philosophy|0": { |
|
"acc": 0.26366559485530544, |
|
"acc_stderr": 0.02502553850053234, |
|
"acc_norm": 0.2733118971061093, |
|
"acc_norm_stderr": 0.02531176597542612 |
|
}, |
|
"custom|mmlu_cloze:prehistory|0": { |
|
"acc": 0.30246913580246915, |
|
"acc_stderr": 0.02555765398186806, |
|
"acc_norm": 0.21604938271604937, |
|
"acc_norm_stderr": 0.022899162918445796 |
|
}, |
|
"custom|mmlu_cloze:professional_accounting|0": { |
|
"acc": 0.2695035460992908, |
|
"acc_stderr": 0.026469036818590627, |
|
"acc_norm": 0.2624113475177305, |
|
"acc_norm_stderr": 0.026244920349843007 |
|
}, |
|
"custom|mmlu_cloze:professional_law|0": { |
|
"acc": 0.23598435462842243, |
|
"acc_stderr": 0.010844802669662684, |
|
"acc_norm": 0.24837027379400262, |
|
"acc_norm_stderr": 0.011035212598034505 |
|
}, |
|
"custom|mmlu_cloze:professional_medicine|0": { |
|
"acc": 0.23161764705882354, |
|
"acc_stderr": 0.025626533803777562, |
|
"acc_norm": 0.27205882352941174, |
|
"acc_norm_stderr": 0.027033041151681456 |
|
}, |
|
"custom|mmlu_cloze:professional_psychology|0": { |
|
"acc": 0.23202614379084968, |
|
"acc_stderr": 0.017077373377857002, |
|
"acc_norm": 0.2696078431372549, |
|
"acc_norm_stderr": 0.017952449196987862 |
|
}, |
|
"custom|mmlu_cloze:public_relations|0": { |
|
"acc": 0.38181818181818183, |
|
"acc_stderr": 0.046534298079135075, |
|
"acc_norm": 0.20909090909090908, |
|
"acc_norm_stderr": 0.0389509101572414 |
|
}, |
|
"custom|mmlu_cloze:security_studies|0": { |
|
"acc": 0.3224489795918367, |
|
"acc_stderr": 0.029923100563683913, |
|
"acc_norm": 0.1836734693877551, |
|
"acc_norm_stderr": 0.024789071332007615 |
|
}, |
|
"custom|mmlu_cloze:sociology|0": { |
|
"acc": 0.23880597014925373, |
|
"acc_stderr": 0.03014777593540922, |
|
"acc_norm": 0.23880597014925373, |
|
"acc_norm_stderr": 0.030147775935409224 |
|
}, |
|
"custom|mmlu_cloze:us_foreign_policy|0": { |
|
"acc": 0.23, |
|
"acc_stderr": 0.04229525846816506, |
|
"acc_norm": 0.24, |
|
"acc_norm_stderr": 0.04292346959909283 |
|
}, |
|
"custom|mmlu_cloze:virology|0": { |
|
"acc": 0.23493975903614459, |
|
"acc_stderr": 0.03300533186128922, |
|
"acc_norm": 0.30120481927710846, |
|
"acc_norm_stderr": 0.035716092300534796 |
|
}, |
|
"custom|mmlu_cloze:world_religions|0": { |
|
"acc": 0.2222222222222222, |
|
"acc_stderr": 0.03188578017686398, |
|
"acc_norm": 0.27485380116959063, |
|
"acc_norm_stderr": 0.03424042924691584 |
|
}, |
|
"custom|mmlu_mc:abstract_algebra|0": { |
|
"acc": 0.22, |
|
"acc_stderr": 0.04163331998932268, |
|
"acc_norm": 0.22, |
|
"acc_norm_stderr": 0.04163331998932268 |
|
}, |
|
"custom|mmlu_mc:anatomy|0": { |
|
"acc": 0.17777777777777778, |
|
"acc_stderr": 0.033027898599017176, |
|
"acc_norm": 0.17777777777777778, |
|
"acc_norm_stderr": 0.033027898599017176 |
|
}, |
|
"custom|mmlu_mc:astronomy|0": { |
|
"acc": 0.18421052631578946, |
|
"acc_stderr": 0.0315469804508223, |
|
"acc_norm": 0.18421052631578946, |
|
"acc_norm_stderr": 0.0315469804508223 |
|
}, |
|
"custom|mmlu_mc:business_ethics|0": { |
|
"acc": 0.3, |
|
"acc_stderr": 0.046056618647183814, |
|
"acc_norm": 0.3, |
|
"acc_norm_stderr": 0.046056618647183814 |
|
}, |
|
"custom|mmlu_mc:clinical_knowledge|0": { |
|
"acc": 0.2037735849056604, |
|
"acc_stderr": 0.024790784501775406, |
|
"acc_norm": 0.2037735849056604, |
|
"acc_norm_stderr": 0.024790784501775406 |
|
}, |
|
"custom|mmlu_mc:college_biology|0": { |
|
"acc": 0.2361111111111111, |
|
"acc_stderr": 0.03551446610810826, |
|
"acc_norm": 0.2361111111111111, |
|
"acc_norm_stderr": 0.03551446610810826 |
|
}, |
|
"custom|mmlu_mc:college_chemistry|0": { |
|
"acc": 0.14, |
|
"acc_stderr": 0.03487350880197771, |
|
"acc_norm": 0.14, |
|
"acc_norm_stderr": 0.03487350880197771 |
|
}, |
|
"custom|mmlu_mc:college_computer_science|0": { |
|
"acc": 0.24, |
|
"acc_stderr": 0.04292346959909283, |
|
"acc_norm": 0.24, |
|
"acc_norm_stderr": 0.04292346959909283 |
|
}, |
|
"custom|mmlu_mc:college_mathematics|0": { |
|
"acc": 0.21, |
|
"acc_stderr": 0.040936018074033256, |
|
"acc_norm": 0.21, |
|
"acc_norm_stderr": 0.040936018074033256 |
|
}, |
|
"custom|mmlu_mc:college_medicine|0": { |
|
"acc": 0.20809248554913296, |
|
"acc_stderr": 0.03095289021774988, |
|
"acc_norm": 0.20809248554913296, |
|
"acc_norm_stderr": 0.03095289021774988 |
|
}, |
|
"custom|mmlu_mc:college_physics|0": { |
|
"acc": 0.21568627450980393, |
|
"acc_stderr": 0.04092563958237654, |
|
"acc_norm": 0.21568627450980393, |
|
"acc_norm_stderr": 0.04092563958237654 |
|
}, |
|
"custom|mmlu_mc:computer_security|0": { |
|
"acc": 0.29, |
|
"acc_stderr": 0.045604802157206845, |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.045604802157206845 |
|
}, |
|
"custom|mmlu_mc:conceptual_physics|0": { |
|
"acc": 0.26382978723404255, |
|
"acc_stderr": 0.028809989854102973, |
|
"acc_norm": 0.26382978723404255, |
|
"acc_norm_stderr": 0.028809989854102973 |
|
}, |
|
"custom|mmlu_mc:econometrics|0": { |
|
"acc": 0.22807017543859648, |
|
"acc_stderr": 0.03947152782669415, |
|
"acc_norm": 0.22807017543859648, |
|
"acc_norm_stderr": 0.03947152782669415 |
|
}, |
|
"custom|mmlu_mc:electrical_engineering|0": { |
|
"acc": 0.25517241379310346, |
|
"acc_stderr": 0.03632984052707842, |
|
"acc_norm": 0.25517241379310346, |
|
"acc_norm_stderr": 0.03632984052707842 |
|
}, |
|
"custom|mmlu_mc:elementary_mathematics|0": { |
|
"acc": 0.20899470899470898, |
|
"acc_stderr": 0.02094048156533486, |
|
"acc_norm": 0.20899470899470898, |
|
"acc_norm_stderr": 0.02094048156533486 |
|
}, |
|
"custom|mmlu_mc:formal_logic|0": { |
|
"acc": 0.30158730158730157, |
|
"acc_stderr": 0.04104947269903394, |
|
"acc_norm": 0.30158730158730157, |
|
"acc_norm_stderr": 0.04104947269903394 |
|
}, |
|
"custom|mmlu_mc:global_facts|0": { |
|
"acc": 0.18, |
|
"acc_stderr": 0.038612291966536934, |
|
"acc_norm": 0.18, |
|
"acc_norm_stderr": 0.038612291966536934 |
|
}, |
|
"custom|mmlu_mc:high_school_biology|0": { |
|
"acc": 0.1870967741935484, |
|
"acc_stderr": 0.022185710092252252, |
|
"acc_norm": 0.1870967741935484, |
|
"acc_norm_stderr": 0.022185710092252252 |
|
}, |
|
"custom|mmlu_mc:high_school_chemistry|0": { |
|
"acc": 0.19704433497536947, |
|
"acc_stderr": 0.02798672466673622, |
|
"acc_norm": 0.19704433497536947, |
|
"acc_norm_stderr": 0.02798672466673622 |
|
}, |
|
"custom|mmlu_mc:high_school_computer_science|0": { |
|
"acc": 0.25, |
|
"acc_stderr": 0.04351941398892446, |
|
"acc_norm": 0.25, |
|
"acc_norm_stderr": 0.04351941398892446 |
|
}, |
|
"custom|mmlu_mc:high_school_european_history|0": { |
|
"acc": 0.23030303030303031, |
|
"acc_stderr": 0.03287666758603488, |
|
"acc_norm": 0.23030303030303031, |
|
"acc_norm_stderr": 0.03287666758603488 |
|
}, |
|
"custom|mmlu_mc:high_school_geography|0": { |
|
"acc": 0.17676767676767677, |
|
"acc_stderr": 0.027178752639044915, |
|
"acc_norm": 0.17676767676767677, |
|
"acc_norm_stderr": 0.027178752639044915 |
|
}, |
|
"custom|mmlu_mc:high_school_government_and_politics|0": { |
|
"acc": 0.20725388601036268, |
|
"acc_stderr": 0.029252823291803613, |
|
"acc_norm": 0.20725388601036268, |
|
"acc_norm_stderr": 0.029252823291803613 |
|
}, |
|
"custom|mmlu_mc:high_school_macroeconomics|0": { |
|
"acc": 0.19743589743589743, |
|
"acc_stderr": 0.020182646968674844, |
|
"acc_norm": 0.19743589743589743, |
|
"acc_norm_stderr": 0.020182646968674844 |
|
}, |
|
"custom|mmlu_mc:high_school_mathematics|0": { |
|
"acc": 0.2111111111111111, |
|
"acc_stderr": 0.024882116857655078, |
|
"acc_norm": 0.2111111111111111, |
|
"acc_norm_stderr": 0.024882116857655078 |
|
}, |
|
"custom|mmlu_mc:high_school_microeconomics|0": { |
|
"acc": 0.19747899159663865, |
|
"acc_stderr": 0.025859164122051463, |
|
"acc_norm": 0.19747899159663865, |
|
"acc_norm_stderr": 0.025859164122051463 |
|
}, |
|
"custom|mmlu_mc:high_school_physics|0": { |
|
"acc": 0.1986754966887417, |
|
"acc_stderr": 0.03257847384436776, |
|
"acc_norm": 0.1986754966887417, |
|
"acc_norm_stderr": 0.03257847384436776 |
|
}, |
|
"custom|mmlu_mc:high_school_psychology|0": { |
|
"acc": 0.1926605504587156, |
|
"acc_stderr": 0.016909276884936094, |
|
"acc_norm": 0.1926605504587156, |
|
"acc_norm_stderr": 0.016909276884936094 |
|
}, |
|
"custom|mmlu_mc:high_school_statistics|0": { |
|
"acc": 0.14814814814814814, |
|
"acc_stderr": 0.024227629273728356, |
|
"acc_norm": 0.14814814814814814, |
|
"acc_norm_stderr": 0.024227629273728356 |
|
}, |
|
"custom|mmlu_mc:high_school_us_history|0": { |
|
"acc": 0.2549019607843137, |
|
"acc_stderr": 0.03058759135160425, |
|
"acc_norm": 0.2549019607843137, |
|
"acc_norm_stderr": 0.03058759135160425 |
|
}, |
|
"custom|mmlu_mc:high_school_world_history|0": { |
|
"acc": 0.25316455696202533, |
|
"acc_stderr": 0.028304657943035282, |
|
"acc_norm": 0.25316455696202533, |
|
"acc_norm_stderr": 0.028304657943035282 |
|
}, |
|
"custom|mmlu_mc:human_aging|0": { |
|
"acc": 0.3094170403587444, |
|
"acc_stderr": 0.031024411740572203, |
|
"acc_norm": 0.3094170403587444, |
|
"acc_norm_stderr": 0.031024411740572203 |
|
}, |
|
"custom|mmlu_mc:human_sexuality|0": { |
|
"acc": 0.26717557251908397, |
|
"acc_stderr": 0.038808483010823944, |
|
"acc_norm": 0.26717557251908397, |
|
"acc_norm_stderr": 0.038808483010823944 |
|
}, |
|
"custom|mmlu_mc:international_law|0": { |
|
"acc": 0.2396694214876033, |
|
"acc_stderr": 0.03896878985070417, |
|
"acc_norm": 0.2396694214876033, |
|
"acc_norm_stderr": 0.03896878985070417 |
|
}, |
|
"custom|mmlu_mc:jurisprudence|0": { |
|
"acc": 0.26851851851851855, |
|
"acc_stderr": 0.04284467968052192, |
|
"acc_norm": 0.26851851851851855, |
|
"acc_norm_stderr": 0.04284467968052192 |
|
}, |
|
"custom|mmlu_mc:logical_fallacies|0": { |
|
"acc": 0.22085889570552147, |
|
"acc_stderr": 0.032591773927421776, |
|
"acc_norm": 0.22085889570552147, |
|
"acc_norm_stderr": 0.032591773927421776 |
|
}, |
|
"custom|mmlu_mc:machine_learning|0": { |
|
"acc": 0.32142857142857145, |
|
"acc_stderr": 0.04432804055291519, |
|
"acc_norm": 0.32142857142857145, |
|
"acc_norm_stderr": 0.04432804055291519 |
|
}, |
|
"custom|mmlu_mc:management|0": { |
|
"acc": 0.17475728155339806, |
|
"acc_stderr": 0.037601780060266224, |
|
"acc_norm": 0.17475728155339806, |
|
"acc_norm_stderr": 0.037601780060266224 |
|
}, |
|
"custom|mmlu_mc:marketing|0": { |
|
"acc": 0.2863247863247863, |
|
"acc_stderr": 0.029614323690456645, |
|
"acc_norm": 0.2863247863247863, |
|
"acc_norm_stderr": 0.029614323690456645 |
|
}, |
|
"custom|mmlu_mc:medical_genetics|0": { |
|
"acc": 0.29, |
|
"acc_stderr": 0.045604802157206845, |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.045604802157206845 |
|
}, |
|
"custom|mmlu_mc:miscellaneous|0": { |
|
"acc": 0.23627075351213284, |
|
"acc_stderr": 0.015190473717037497, |
|
"acc_norm": 0.23627075351213284, |
|
"acc_norm_stderr": 0.015190473717037497 |
|
}, |
|
"custom|mmlu_mc:moral_disputes|0": { |
|
"acc": 0.2398843930635838, |
|
"acc_stderr": 0.02298959254312357, |
|
"acc_norm": 0.2398843930635838, |
|
"acc_norm_stderr": 0.02298959254312357 |
|
}, |
|
"custom|mmlu_mc:moral_scenarios|0": { |
|
"acc": 0.23798882681564246, |
|
"acc_stderr": 0.014242630070574915, |
|
"acc_norm": 0.23798882681564246, |
|
"acc_norm_stderr": 0.014242630070574915 |
|
}, |
|
"custom|mmlu_mc:nutrition|0": { |
|
"acc": 0.20915032679738563, |
|
"acc_stderr": 0.02328768531233481, |
|
"acc_norm": 0.20915032679738563, |
|
"acc_norm_stderr": 0.02328768531233481 |
|
}, |
|
"custom|mmlu_mc:philosophy|0": { |
|
"acc": 0.18971061093247588, |
|
"acc_stderr": 0.022268196258783228, |
|
"acc_norm": 0.18971061093247588, |
|
"acc_norm_stderr": 0.022268196258783228 |
|
}, |
|
"custom|mmlu_mc:prehistory|0": { |
|
"acc": 0.22530864197530864, |
|
"acc_stderr": 0.02324620264781975, |
|
"acc_norm": 0.22530864197530864, |
|
"acc_norm_stderr": 0.02324620264781975 |
|
}, |
|
"custom|mmlu_mc:professional_accounting|0": { |
|
"acc": 0.24468085106382978, |
|
"acc_stderr": 0.02564555362226673, |
|
"acc_norm": 0.24468085106382978, |
|
"acc_norm_stderr": 0.02564555362226673 |
|
}, |
|
"custom|mmlu_mc:professional_law|0": { |
|
"acc": 0.24445893089960888, |
|
"acc_stderr": 0.010976425013113902, |
|
"acc_norm": 0.24445893089960888, |
|
"acc_norm_stderr": 0.010976425013113902 |
|
}, |
|
"custom|mmlu_mc:professional_medicine|0": { |
|
"acc": 0.20220588235294118, |
|
"acc_stderr": 0.024398192986654924, |
|
"acc_norm": 0.20220588235294118, |
|
"acc_norm_stderr": 0.024398192986654924 |
|
}, |
|
"custom|mmlu_mc:professional_psychology|0": { |
|
"acc": 0.25326797385620914, |
|
"acc_stderr": 0.017593486895366835, |
|
"acc_norm": 0.25326797385620914, |
|
"acc_norm_stderr": 0.017593486895366835 |
|
}, |
|
"custom|mmlu_mc:public_relations|0": { |
|
"acc": 0.21818181818181817, |
|
"acc_stderr": 0.03955932861795833, |
|
"acc_norm": 0.21818181818181817, |
|
"acc_norm_stderr": 0.03955932861795833 |
|
}, |
|
"custom|mmlu_mc:security_studies|0": { |
|
"acc": 0.19183673469387755, |
|
"acc_stderr": 0.025206963154225392, |
|
"acc_norm": 0.19183673469387755, |
|
"acc_norm_stderr": 0.025206963154225392 |
|
}, |
|
"custom|mmlu_mc:sociology|0": { |
|
"acc": 0.24875621890547264, |
|
"acc_stderr": 0.030567675938916707, |
|
"acc_norm": 0.24875621890547264, |
|
"acc_norm_stderr": 0.030567675938916707 |
|
}, |
|
"custom|mmlu_mc:us_foreign_policy|0": { |
|
"acc": 0.3, |
|
"acc_stderr": 0.046056618647183814, |
|
"acc_norm": 0.3, |
|
"acc_norm_stderr": 0.046056618647183814 |
|
}, |
|
"custom|mmlu_mc:virology|0": { |
|
"acc": 0.2891566265060241, |
|
"acc_stderr": 0.03529486801511115, |
|
"acc_norm": 0.2891566265060241, |
|
"acc_norm_stderr": 0.03529486801511115 |
|
}, |
|
"custom|mmlu_mc:world_religions|0": { |
|
"acc": 0.3157894736842105, |
|
"acc_stderr": 0.035650796707083106, |
|
"acc_norm": 0.3157894736842105, |
|
"acc_norm_stderr": 0.035650796707083106 |
|
}, |
|
"custom|mmlu_pro_cloze|0": { |
|
"acc": 0.08477393617021277, |
|
"acc_stderr": 0.002539478267781077, |
|
"acc_norm": 0.07446808510638298, |
|
"acc_norm_stderr": 0.0023934806969786856 |
|
}, |
|
"custom|openbookqa|0": { |
|
"acc": 0.196, |
|
"acc_stderr": 0.017770751227744856, |
|
"acc_norm": 0.31, |
|
"acc_norm_stderr": 0.020704041021724805 |
|
}, |
|
"custom|piqa|0": { |
|
"acc": 0.6164309031556039, |
|
"acc_stderr": 0.011345128734116274, |
|
"acc_norm": 0.6109902067464635, |
|
"acc_norm_stderr": 0.011374774974447468 |
|
}, |
|
"custom|siqa|0": { |
|
"acc": 0.3474923234390993, |
|
"acc_stderr": 0.010774926057105855, |
|
"acc_norm": 0.4022517911975435, |
|
"acc_norm_stderr": 0.01109575895130807 |
|
}, |
|
"custom|winogrande|0": { |
|
"acc": 0.5217048145224941, |
|
"acc_stderr": 0.014039239216484633, |
|
"acc_norm": 0.5090765588003157, |
|
"acc_norm_stderr": 0.014050170094497707 |
|
}, |
|
"custom|gsm8k|5": { |
|
"qem": 0.0, |
|
"qem_stderr": 0.0 |
|
}, |
|
"custom|trivia_qa|0": { |
|
"qem": 0.02067543468568881, |
|
"qem_stderr": 0.001062290065972327 |
|
}, |
|
"custom|arc:_average|0": { |
|
"acc": 0.39964204042702334, |
|
"acc_stderr": 0.011138818645564484, |
|
"acc_norm": 0.38677876604497763, |
|
"acc_norm_stderr": 0.011443478008584354 |
|
}, |
|
"custom|mmlu_cloze:_average|0": { |
|
"acc": 0.2519515926895652, |
|
"acc_stderr": 0.03216451222160932, |
|
"acc_norm": 0.26617775276148775, |
|
"acc_norm_stderr": 0.03290929767916069 |
|
}, |
|
"custom|mmlu_mc:_average|0": { |
|
"acc": 0.23140608269805896, |
|
"acc_stderr": 0.03147532325432879, |
|
"acc_norm": 0.23140608269805896, |
|
"acc_norm_stderr": 0.03147532325432879 |
|
}, |
|
"all": { |
|
"acc": 0.24957744996997022, |
|
"acc_stderr": 0.03027535954982755, |
|
"acc_norm": 0.2572518681233832, |
|
"acc_norm_stderr": 0.030652543223254915, |
|
"qem": 0.010337717342844405, |
|
"qem_stderr": 0.0005311450329861635 |
|
} |
|
}, |
|
"versions": { |
|
"custom|arc:challenge|0": 0, |
|
"custom|arc:easy|0": 0, |
|
"custom|commonsense_qa|0": 0, |
|
"custom|gsm8k|5": 0, |
|
"custom|hellaswag|0": 0, |
|
"custom|mmlu_cloze:abstract_algebra|0": 0, |
|
"custom|mmlu_cloze:anatomy|0": 0, |
|
"custom|mmlu_cloze:astronomy|0": 0, |
|
"custom|mmlu_cloze:business_ethics|0": 0, |
|
"custom|mmlu_cloze:clinical_knowledge|0": 0, |
|
"custom|mmlu_cloze:college_biology|0": 0, |
|
"custom|mmlu_cloze:college_chemistry|0": 0, |
|
"custom|mmlu_cloze:college_computer_science|0": 0, |
|
"custom|mmlu_cloze:college_mathematics|0": 0, |
|
"custom|mmlu_cloze:college_medicine|0": 0, |
|
"custom|mmlu_cloze:college_physics|0": 0, |
|
"custom|mmlu_cloze:computer_security|0": 0, |
|
"custom|mmlu_cloze:conceptual_physics|0": 0, |
|
"custom|mmlu_cloze:econometrics|0": 0, |
|
"custom|mmlu_cloze:electrical_engineering|0": 0, |
|
"custom|mmlu_cloze:elementary_mathematics|0": 0, |
|
"custom|mmlu_cloze:formal_logic|0": 0, |
|
"custom|mmlu_cloze:global_facts|0": 0, |
|
"custom|mmlu_cloze:high_school_biology|0": 0, |
|
"custom|mmlu_cloze:high_school_chemistry|0": 0, |
|
"custom|mmlu_cloze:high_school_computer_science|0": 0, |
|
"custom|mmlu_cloze:high_school_european_history|0": 0, |
|
"custom|mmlu_cloze:high_school_geography|0": 0, |
|
"custom|mmlu_cloze:high_school_government_and_politics|0": 0, |
|
"custom|mmlu_cloze:high_school_macroeconomics|0": 0, |
|
"custom|mmlu_cloze:high_school_mathematics|0": 0, |
|
"custom|mmlu_cloze:high_school_microeconomics|0": 0, |
|
"custom|mmlu_cloze:high_school_physics|0": 0, |
|
"custom|mmlu_cloze:high_school_psychology|0": 0, |
|
"custom|mmlu_cloze:high_school_statistics|0": 0, |
|
"custom|mmlu_cloze:high_school_us_history|0": 0, |
|
"custom|mmlu_cloze:high_school_world_history|0": 0, |
|
"custom|mmlu_cloze:human_aging|0": 0, |
|
"custom|mmlu_cloze:human_sexuality|0": 0, |
|
"custom|mmlu_cloze:international_law|0": 0, |
|
"custom|mmlu_cloze:jurisprudence|0": 0, |
|
"custom|mmlu_cloze:logical_fallacies|0": 0, |
|
"custom|mmlu_cloze:machine_learning|0": 0, |
|
"custom|mmlu_cloze:management|0": 0, |
|
"custom|mmlu_cloze:marketing|0": 0, |
|
"custom|mmlu_cloze:medical_genetics|0": 0, |
|
"custom|mmlu_cloze:miscellaneous|0": 0, |
|
"custom|mmlu_cloze:moral_disputes|0": 0, |
|
"custom|mmlu_cloze:moral_scenarios|0": 0, |
|
"custom|mmlu_cloze:nutrition|0": 0, |
|
"custom|mmlu_cloze:philosophy|0": 0, |
|
"custom|mmlu_cloze:prehistory|0": 0, |
|
"custom|mmlu_cloze:professional_accounting|0": 0, |
|
"custom|mmlu_cloze:professional_law|0": 0, |
|
"custom|mmlu_cloze:professional_medicine|0": 0, |
|
"custom|mmlu_cloze:professional_psychology|0": 0, |
|
"custom|mmlu_cloze:public_relations|0": 0, |
|
"custom|mmlu_cloze:security_studies|0": 0, |
|
"custom|mmlu_cloze:sociology|0": 0, |
|
"custom|mmlu_cloze:us_foreign_policy|0": 0, |
|
"custom|mmlu_cloze:virology|0": 0, |
|
"custom|mmlu_cloze:world_religions|0": 0, |
|
"custom|mmlu_mc:abstract_algebra|0": 0, |
|
"custom|mmlu_mc:anatomy|0": 0, |
|
"custom|mmlu_mc:astronomy|0": 0, |
|
"custom|mmlu_mc:business_ethics|0": 0, |
|
"custom|mmlu_mc:clinical_knowledge|0": 0, |
|
"custom|mmlu_mc:college_biology|0": 0, |
|
"custom|mmlu_mc:college_chemistry|0": 0, |
|
"custom|mmlu_mc:college_computer_science|0": 0, |
|
"custom|mmlu_mc:college_mathematics|0": 0, |
|
"custom|mmlu_mc:college_medicine|0": 0, |
|
"custom|mmlu_mc:college_physics|0": 0, |
|
"custom|mmlu_mc:computer_security|0": 0, |
|
"custom|mmlu_mc:conceptual_physics|0": 0, |
|
"custom|mmlu_mc:econometrics|0": 0, |
|
"custom|mmlu_mc:electrical_engineering|0": 0, |
|
"custom|mmlu_mc:elementary_mathematics|0": 0, |
|
"custom|mmlu_mc:formal_logic|0": 0, |
|
"custom|mmlu_mc:global_facts|0": 0, |
|
"custom|mmlu_mc:high_school_biology|0": 0, |
|
"custom|mmlu_mc:high_school_chemistry|0": 0, |
|
"custom|mmlu_mc:high_school_computer_science|0": 0, |
|
"custom|mmlu_mc:high_school_european_history|0": 0, |
|
"custom|mmlu_mc:high_school_geography|0": 0, |
|
"custom|mmlu_mc:high_school_government_and_politics|0": 0, |
|
"custom|mmlu_mc:high_school_macroeconomics|0": 0, |
|
"custom|mmlu_mc:high_school_mathematics|0": 0, |
|
"custom|mmlu_mc:high_school_microeconomics|0": 0, |
|
"custom|mmlu_mc:high_school_physics|0": 0, |
|
"custom|mmlu_mc:high_school_psychology|0": 0, |
|
"custom|mmlu_mc:high_school_statistics|0": 0, |
|
"custom|mmlu_mc:high_school_us_history|0": 0, |
|
"custom|mmlu_mc:high_school_world_history|0": 0, |
|
"custom|mmlu_mc:human_aging|0": 0, |
|
"custom|mmlu_mc:human_sexuality|0": 0, |
|
"custom|mmlu_mc:international_law|0": 0, |
|
"custom|mmlu_mc:jurisprudence|0": 0, |
|
"custom|mmlu_mc:logical_fallacies|0": 0, |
|
"custom|mmlu_mc:machine_learning|0": 0, |
|
"custom|mmlu_mc:management|0": 0, |
|
"custom|mmlu_mc:marketing|0": 0, |
|
"custom|mmlu_mc:medical_genetics|0": 0, |
|
"custom|mmlu_mc:miscellaneous|0": 0, |
|
"custom|mmlu_mc:moral_disputes|0": 0, |
|
"custom|mmlu_mc:moral_scenarios|0": 0, |
|
"custom|mmlu_mc:nutrition|0": 0, |
|
"custom|mmlu_mc:philosophy|0": 0, |
|
"custom|mmlu_mc:prehistory|0": 0, |
|
"custom|mmlu_mc:professional_accounting|0": 0, |
|
"custom|mmlu_mc:professional_law|0": 0, |
|
"custom|mmlu_mc:professional_medicine|0": 0, |
|
"custom|mmlu_mc:professional_psychology|0": 0, |
|
"custom|mmlu_mc:public_relations|0": 0, |
|
"custom|mmlu_mc:security_studies|0": 0, |
|
"custom|mmlu_mc:sociology|0": 0, |
|
"custom|mmlu_mc:us_foreign_policy|0": 0, |
|
"custom|mmlu_mc:virology|0": 0, |
|
"custom|mmlu_mc:world_religions|0": 0, |
|
"custom|mmlu_pro_cloze|0": 0, |
|
"custom|openbookqa|0": 0, |
|
"custom|piqa|0": 0, |
|
"custom|siqa|0": 0, |
|
"custom|trivia_qa|0": 0, |
|
"custom|winogrande|0": 0 |
|
}, |
|
"config_tasks": { |
|
"custom|arc:challenge": { |
|
"name": "arc:challenge", |
|
"prompt_function": "arc", |
|
"hf_repo": "ai2_arc", |
|
"hf_subset": "ARC-Challenge", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1172, |
|
"effective_num_docs": 1172, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|arc:easy": { |
|
"name": "arc:easy", |
|
"prompt_function": "arc", |
|
"hf_repo": "ai2_arc", |
|
"hf_subset": "ARC-Easy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 2376, |
|
"effective_num_docs": 2376, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|commonsense_qa": { |
|
"name": "commonsense_qa", |
|
"prompt_function": "commonsense_qa_prompt", |
|
"hf_repo": "commonsense_qa", |
|
"hf_subset": "default", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"validation" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1221, |
|
"effective_num_docs": 1221, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|gsm8k": { |
|
"name": "gsm8k", |
|
"prompt_function": "gsm8k", |
|
"hf_repo": "gsm8k", |
|
"hf_subset": "main", |
|
"metric": [ |
|
"quasi_exact_match_gsm8k" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": "random_sampling_from_train", |
|
"generation_size": 256, |
|
"stop_sequence": [ |
|
"Question:", |
|
"Question" |
|
], |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1319, |
|
"effective_num_docs": 1319, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|hellaswag": { |
|
"name": "hellaswag", |
|
"prompt_function": "hellaswag_prompt", |
|
"hf_repo": "hellaswag", |
|
"hf_subset": "default", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"validation" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 10042, |
|
"effective_num_docs": 10042, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:abstract_algebra": { |
|
"name": "mmlu_cloze:abstract_algebra", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:anatomy": { |
|
"name": "mmlu_cloze:anatomy", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:astronomy": { |
|
"name": "mmlu_cloze:astronomy", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:business_ethics": { |
|
"name": "mmlu_cloze:business_ethics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:clinical_knowledge": { |
|
"name": "mmlu_cloze:clinical_knowledge", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:college_biology": { |
|
"name": "mmlu_cloze:college_biology", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:college_chemistry": { |
|
"name": "mmlu_cloze:college_chemistry", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:college_computer_science": { |
|
"name": "mmlu_cloze:college_computer_science", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:college_mathematics": { |
|
"name": "mmlu_cloze:college_mathematics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:college_medicine": { |
|
"name": "mmlu_cloze:college_medicine", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:college_physics": { |
|
"name": "mmlu_cloze:college_physics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:computer_security": { |
|
"name": "mmlu_cloze:computer_security", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:conceptual_physics": { |
|
"name": "mmlu_cloze:conceptual_physics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:econometrics": { |
|
"name": "mmlu_cloze:econometrics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:electrical_engineering": { |
|
"name": "mmlu_cloze:electrical_engineering", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:elementary_mathematics": { |
|
"name": "mmlu_cloze:elementary_mathematics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:formal_logic": { |
|
"name": "mmlu_cloze:formal_logic", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:global_facts": { |
|
"name": "mmlu_cloze:global_facts", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_biology": { |
|
"name": "mmlu_cloze:high_school_biology", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 310, |
|
"effective_num_docs": 310, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_chemistry": { |
|
"name": "mmlu_cloze:high_school_chemistry", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_computer_science": { |
|
"name": "mmlu_cloze:high_school_computer_science", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_european_history": { |
|
"name": "mmlu_cloze:high_school_european_history", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 165, |
|
"effective_num_docs": 165, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_geography": { |
|
"name": "mmlu_cloze:high_school_geography", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_government_and_politics": { |
|
"name": "mmlu_cloze:high_school_government_and_politics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_macroeconomics": { |
|
"name": "mmlu_cloze:high_school_macroeconomics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_mathematics": { |
|
"name": "mmlu_cloze:high_school_mathematics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_microeconomics": { |
|
"name": "mmlu_cloze:high_school_microeconomics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_physics": { |
|
"name": "mmlu_cloze:high_school_physics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_psychology": { |
|
"name": "mmlu_cloze:high_school_psychology", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_statistics": { |
|
"name": "mmlu_cloze:high_school_statistics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 216, |
|
"effective_num_docs": 216, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_us_history": { |
|
"name": "mmlu_cloze:high_school_us_history", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_world_history": { |
|
"name": "mmlu_cloze:high_school_world_history", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:human_aging": { |
|
"name": "mmlu_cloze:human_aging", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:human_sexuality": { |
|
"name": "mmlu_cloze:human_sexuality", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:international_law": { |
|
"name": "mmlu_cloze:international_law", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:jurisprudence": { |
|
"name": "mmlu_cloze:jurisprudence", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:logical_fallacies": { |
|
"name": "mmlu_cloze:logical_fallacies", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:machine_learning": { |
|
"name": "mmlu_cloze:machine_learning", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:management": { |
|
"name": "mmlu_cloze:management", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "management", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:marketing": { |
|
"name": "mmlu_cloze:marketing", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:medical_genetics": { |
|
"name": "mmlu_cloze:medical_genetics", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:miscellaneous": { |
|
"name": "mmlu_cloze:miscellaneous", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 783, |
|
"effective_num_docs": 783, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:moral_disputes": { |
|
"name": "mmlu_cloze:moral_disputes", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:moral_scenarios": { |
|
"name": "mmlu_cloze:moral_scenarios", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:nutrition": { |
|
"name": "mmlu_cloze:nutrition", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:philosophy": { |
|
"name": "mmlu_cloze:philosophy", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:prehistory": { |
|
"name": "mmlu_cloze:prehistory", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_accounting": { |
|
"name": "mmlu_cloze:professional_accounting", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 282, |
|
"effective_num_docs": 282, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_law": { |
|
"name": "mmlu_cloze:professional_law", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1534, |
|
"effective_num_docs": 1534, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_medicine": { |
|
"name": "mmlu_cloze:professional_medicine", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 272, |
|
"effective_num_docs": 272, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_psychology": { |
|
"name": "mmlu_cloze:professional_psychology", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:public_relations": { |
|
"name": "mmlu_cloze:public_relations", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:security_studies": { |
|
"name": "mmlu_cloze:security_studies", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:sociology": { |
|
"name": "mmlu_cloze:sociology", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:us_foreign_policy": { |
|
"name": "mmlu_cloze:us_foreign_policy", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:virology": { |
|
"name": "mmlu_cloze:virology", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_cloze:world_religions": { |
|
"name": "mmlu_cloze:world_religions", |
|
"prompt_function": "mmlu_cloze_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:abstract_algebra": { |
|
"name": "mmlu_mc:abstract_algebra", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:anatomy": { |
|
"name": "mmlu_mc:anatomy", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:astronomy": { |
|
"name": "mmlu_mc:astronomy", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:business_ethics": { |
|
"name": "mmlu_mc:business_ethics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:clinical_knowledge": { |
|
"name": "mmlu_mc:clinical_knowledge", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:college_biology": { |
|
"name": "mmlu_mc:college_biology", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:college_chemistry": { |
|
"name": "mmlu_mc:college_chemistry", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:college_computer_science": { |
|
"name": "mmlu_mc:college_computer_science", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:college_mathematics": { |
|
"name": "mmlu_mc:college_mathematics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:college_medicine": { |
|
"name": "mmlu_mc:college_medicine", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:college_physics": { |
|
"name": "mmlu_mc:college_physics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:computer_security": { |
|
"name": "mmlu_mc:computer_security", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:conceptual_physics": { |
|
"name": "mmlu_mc:conceptual_physics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:econometrics": { |
|
"name": "mmlu_mc:econometrics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:electrical_engineering": { |
|
"name": "mmlu_mc:electrical_engineering", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:elementary_mathematics": { |
|
"name": "mmlu_mc:elementary_mathematics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:formal_logic": { |
|
"name": "mmlu_mc:formal_logic", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:global_facts": { |
|
"name": "mmlu_mc:global_facts", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_biology": { |
|
"name": "mmlu_mc:high_school_biology", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 310, |
|
"effective_num_docs": 310, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_chemistry": { |
|
"name": "mmlu_mc:high_school_chemistry", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_computer_science": { |
|
"name": "mmlu_mc:high_school_computer_science", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_european_history": { |
|
"name": "mmlu_mc:high_school_european_history", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 165, |
|
"effective_num_docs": 165, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_geography": { |
|
"name": "mmlu_mc:high_school_geography", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_government_and_politics": { |
|
"name": "mmlu_mc:high_school_government_and_politics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_macroeconomics": { |
|
"name": "mmlu_mc:high_school_macroeconomics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_mathematics": { |
|
"name": "mmlu_mc:high_school_mathematics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_microeconomics": { |
|
"name": "mmlu_mc:high_school_microeconomics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_physics": { |
|
"name": "mmlu_mc:high_school_physics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_psychology": { |
|
"name": "mmlu_mc:high_school_psychology", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_statistics": { |
|
"name": "mmlu_mc:high_school_statistics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 216, |
|
"effective_num_docs": 216, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_us_history": { |
|
"name": "mmlu_mc:high_school_us_history", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_world_history": { |
|
"name": "mmlu_mc:high_school_world_history", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:human_aging": { |
|
"name": "mmlu_mc:human_aging", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:human_sexuality": { |
|
"name": "mmlu_mc:human_sexuality", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:international_law": { |
|
"name": "mmlu_mc:international_law", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:jurisprudence": { |
|
"name": "mmlu_mc:jurisprudence", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:logical_fallacies": { |
|
"name": "mmlu_mc:logical_fallacies", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:machine_learning": { |
|
"name": "mmlu_mc:machine_learning", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:management": { |
|
"name": "mmlu_mc:management", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "management", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:marketing": { |
|
"name": "mmlu_mc:marketing", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:medical_genetics": { |
|
"name": "mmlu_mc:medical_genetics", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:miscellaneous": { |
|
"name": "mmlu_mc:miscellaneous", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 783, |
|
"effective_num_docs": 783, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:moral_disputes": { |
|
"name": "mmlu_mc:moral_disputes", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:moral_scenarios": { |
|
"name": "mmlu_mc:moral_scenarios", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:nutrition": { |
|
"name": "mmlu_mc:nutrition", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:philosophy": { |
|
"name": "mmlu_mc:philosophy", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:prehistory": { |
|
"name": "mmlu_mc:prehistory", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:professional_accounting": { |
|
"name": "mmlu_mc:professional_accounting", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 282, |
|
"effective_num_docs": 282, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:professional_law": { |
|
"name": "mmlu_mc:professional_law", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1534, |
|
"effective_num_docs": 1534, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:professional_medicine": { |
|
"name": "mmlu_mc:professional_medicine", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 272, |
|
"effective_num_docs": 272, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:professional_psychology": { |
|
"name": "mmlu_mc:professional_psychology", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:public_relations": { |
|
"name": "mmlu_mc:public_relations", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:security_studies": { |
|
"name": "mmlu_mc:security_studies", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:sociology": { |
|
"name": "mmlu_mc:sociology", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:us_foreign_policy": { |
|
"name": "mmlu_mc:us_foreign_policy", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:virology": { |
|
"name": "mmlu_mc:virology", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_mc:world_religions": { |
|
"name": "mmlu_mc:world_religions", |
|
"prompt_function": "mmlu_mc_prompt", |
|
"hf_repo": "lighteval/mmlu", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "dev", |
|
"few_shots_select": null, |
|
"generation_size": 1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|mmlu_pro_cloze": { |
|
"name": "mmlu_pro_cloze", |
|
"prompt_function": "mmlu_pro_cloze_prompt", |
|
"hf_repo": "TIGER-Lab/MMLU-Pro", |
|
"hf_subset": "default", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 12032, |
|
"effective_num_docs": 12032, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|openbookqa": { |
|
"name": "openbookqa", |
|
"prompt_function": "openbookqa", |
|
"hf_repo": "openbookqa", |
|
"hf_subset": "main", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"validation" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 500, |
|
"effective_num_docs": 500, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|piqa": { |
|
"name": "piqa", |
|
"prompt_function": "piqa_harness", |
|
"hf_repo": "piqa", |
|
"hf_subset": "plain_text", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"validation" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1838, |
|
"effective_num_docs": 1838, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|siqa": { |
|
"name": "siqa", |
|
"prompt_function": "siqa_prompt", |
|
"hf_repo": "lighteval/siqa", |
|
"hf_subset": "default", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"validation" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1954, |
|
"effective_num_docs": 1954, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|trivia_qa": { |
|
"name": "trivia_qa", |
|
"prompt_function": "triviaqa", |
|
"hf_repo": "mandarjoshi/trivia_qa", |
|
"hf_subset": "rc.nocontext", |
|
"metric": [ |
|
"quasi_exact_match_triviaqa" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation" |
|
], |
|
"evaluation_splits": [ |
|
"validation" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": "random_sampling_from_train", |
|
"generation_size": 20, |
|
"stop_sequence": [ |
|
"\n", |
|
".", |
|
"," |
|
], |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 17944, |
|
"effective_num_docs": 17944, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
}, |
|
"custom|winogrande": { |
|
"name": "winogrande", |
|
"prompt_function": "winogrande", |
|
"hf_repo": "winogrande", |
|
"hf_subset": "winogrande_xl", |
|
"metric": [ |
|
"loglikelihood_acc", |
|
"loglikelihood_acc_norm_nospace" |
|
], |
|
"hf_avail_splits": [ |
|
"train", |
|
"validation", |
|
"test" |
|
], |
|
"evaluation_splits": [ |
|
"validation" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": null, |
|
"stop_sequence": null, |
|
"output_regex": null, |
|
"num_samples": null, |
|
"frozen": false, |
|
"suite": [ |
|
"custom" |
|
], |
|
"original_num_docs": 1267, |
|
"effective_num_docs": 1267, |
|
"trust_dataset": true, |
|
"must_remove_duplicate_docs": null, |
|
"version": 0 |
|
} |
|
}, |
|
"summary_tasks": { |
|
"custom|arc:challenge|0": { |
|
"hashes": { |
|
"hash_examples": "17b0cae357c0259e", |
|
"hash_full_prompts": "17b0cae357c0259e", |
|
"hash_input_tokens": "4ecabb2d7f956267", |
|
"hash_cont_tokens": "382807b255d8e654" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1172, |
|
"padded": 4687, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|arc:easy|0": { |
|
"hashes": { |
|
"hash_examples": "63703c3cdff55bec", |
|
"hash_full_prompts": "63703c3cdff55bec", |
|
"hash_input_tokens": "ae39a87709c09046", |
|
"hash_cont_tokens": "e5bfd0dd9f21e228" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2376, |
|
"padded": 9501, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|commonsense_qa|0": { |
|
"hashes": { |
|
"hash_examples": "2e514c541df5ae5b", |
|
"hash_full_prompts": "2e514c541df5ae5b", |
|
"hash_input_tokens": "5ea2a6658ad43123", |
|
"hash_cont_tokens": "d72bd296750fdb4d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1221, |
|
"padded": 6105, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|hellaswag|0": { |
|
"hashes": { |
|
"hash_examples": "31985c805c3a737e", |
|
"hash_full_prompts": "31985c805c3a737e", |
|
"hash_input_tokens": "9481581cb2f9d3bb", |
|
"hash_cont_tokens": "170d770fcde8ba33" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 10042, |
|
"padded": 40168, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "ff00c12a680621ba", |
|
"hash_full_prompts": "ff00c12a680621ba", |
|
"hash_input_tokens": "825a07b3f48f0cfb", |
|
"hash_cont_tokens": "d0c19f27d7deb80f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "7f9c9593991d6727", |
|
"hash_full_prompts": "7f9c9593991d6727", |
|
"hash_input_tokens": "d7d6de8abfc50ccc", |
|
"hash_cont_tokens": "42187024d0c75d7d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 540, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "ff5985a306787836", |
|
"hash_full_prompts": "ff5985a306787836", |
|
"hash_input_tokens": "6e1b17a96a4ecdec", |
|
"hash_cont_tokens": "74bf19f47d798d4e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 608, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "e3fe02a23d08c2d0", |
|
"hash_full_prompts": "e3fe02a23d08c2d0", |
|
"hash_input_tokens": "31df81de8053a502", |
|
"hash_cont_tokens": "3a9f7cebab894907" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "9b962be8e1615cd5", |
|
"hash_full_prompts": "9b962be8e1615cd5", |
|
"hash_input_tokens": "2a4bfffc23f19d96", |
|
"hash_cont_tokens": "302ae0fc9cb8523d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1060, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "17b4ca841de3a2a3", |
|
"hash_full_prompts": "17b4ca841de3a2a3", |
|
"hash_input_tokens": "3d21280c8de2c7aa", |
|
"hash_cont_tokens": "ef093a5883a6d392" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 576, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "babea49005fd8249", |
|
"hash_full_prompts": "babea49005fd8249", |
|
"hash_input_tokens": "955f58f5d70c94c7", |
|
"hash_cont_tokens": "8efb5376b8e37c39" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "47f4fef1846c8914", |
|
"hash_full_prompts": "47f4fef1846c8914", |
|
"hash_input_tokens": "8179bf2028cd2f04", |
|
"hash_cont_tokens": "c97234a8cd658381" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "4d3686d599963414", |
|
"hash_full_prompts": "4d3686d599963414", |
|
"hash_input_tokens": "c7d240d7cb23b145", |
|
"hash_cont_tokens": "2ba03c3efef40c8a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "7209619ecac6f235", |
|
"hash_full_prompts": "7209619ecac6f235", |
|
"hash_input_tokens": "e35f70f368fc586a", |
|
"hash_cont_tokens": "8ee57a8c6fb00b11" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 692, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "6131b6c60dd7f055", |
|
"hash_full_prompts": "6131b6c60dd7f055", |
|
"hash_input_tokens": "6c2075c9f928a079", |
|
"hash_cont_tokens": "8af2e1696b293e0e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 399, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "99539c9a5bc98a59", |
|
"hash_full_prompts": "99539c9a5bc98a59", |
|
"hash_input_tokens": "8550a8d350932163", |
|
"hash_cont_tokens": "d4773943d95ea3a4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 394, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "4e15015839d00858", |
|
"hash_full_prompts": "4e15015839d00858", |
|
"hash_input_tokens": "173445d4b851c661", |
|
"hash_cont_tokens": "c4bbee0471416c1d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 920, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "fba5c647465e89e0", |
|
"hash_full_prompts": "fba5c647465e89e0", |
|
"hash_input_tokens": "df936adf2f851607", |
|
"hash_cont_tokens": "7a458dddb4002455" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 446, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "2db2ba0cb98cda51", |
|
"hash_full_prompts": "2db2ba0cb98cda51", |
|
"hash_input_tokens": "4b30e518ba867fe9", |
|
"hash_cont_tokens": "c027a21314d9cb39" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 571, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "f231cd5ae05742bb", |
|
"hash_full_prompts": "f231cd5ae05742bb", |
|
"hash_input_tokens": "0126e32933ea886b", |
|
"hash_cont_tokens": "ffbc160045a6c7c7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1467, |
|
"non_padded": 45, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "b69d9607d5da536e", |
|
"hash_full_prompts": "b69d9607d5da536e", |
|
"hash_input_tokens": "b5734fec1a3ed448", |
|
"hash_cont_tokens": "f202160d6490d213" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 500, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "c9f53772e672f6bc", |
|
"hash_full_prompts": "c9f53772e672f6bc", |
|
"hash_input_tokens": "a152fd81b0305203", |
|
"hash_cont_tokens": "173cc0f486d62689" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 377, |
|
"non_padded": 23, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "455027cf6cdd02bc", |
|
"hash_full_prompts": "455027cf6cdd02bc", |
|
"hash_input_tokens": "22d57f0f3025de3b", |
|
"hash_cont_tokens": "0e36fd7d5c0bc1c9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 310, |
|
"padded": 1204, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "95d9caac9edbc34d", |
|
"hash_full_prompts": "95d9caac9edbc34d", |
|
"hash_input_tokens": "6e49e3e72b047a3e", |
|
"hash_cont_tokens": "98be610b601651a3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 796, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "6e44706db3791e51", |
|
"hash_full_prompts": "6e44706db3791e51", |
|
"hash_input_tokens": "d434692dc3d29b06", |
|
"hash_cont_tokens": "9ad1d237a36c27e2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 398, |
|
"non_padded": 2, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "9078fce41897117d", |
|
"hash_full_prompts": "9078fce41897117d", |
|
"hash_input_tokens": "baa5c1ca8f6add95", |
|
"hash_cont_tokens": "1c0bac822924721f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 165, |
|
"padded": 660, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "c68adcc34130a2e8", |
|
"hash_full_prompts": "c68adcc34130a2e8", |
|
"hash_input_tokens": "b3bd3f5acf3bb59b", |
|
"hash_cont_tokens": "9c18c9389334971d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 754, |
|
"non_padded": 38, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "6f839b19e49a0858", |
|
"hash_full_prompts": "6f839b19e49a0858", |
|
"hash_input_tokens": "3612995eb429ca2a", |
|
"hash_cont_tokens": "04fd8615a95849dc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 729, |
|
"non_padded": 43, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "708a5c05e7adb220", |
|
"hash_full_prompts": "708a5c05e7adb220", |
|
"hash_input_tokens": "6573e125588b4a17", |
|
"hash_cont_tokens": "2aebe52941f95aab" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1504, |
|
"non_padded": 56, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "47c621dd61fd7790", |
|
"hash_full_prompts": "47c621dd61fd7790", |
|
"hash_input_tokens": "b5b130940d42765d", |
|
"hash_cont_tokens": "c451e8276a516c39" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 270, |
|
"padded": 1068, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_microeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "3c467180d90f6371", |
|
"hash_full_prompts": "3c467180d90f6371", |
|
"hash_input_tokens": "ef39656ea8bf35f0", |
|
"hash_cont_tokens": "d8563ad90149cae5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 909, |
|
"non_padded": 43, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_physics|0": { |
|
"hashes": { |
|
"hash_examples": "89a598cdde43be79", |
|
"hash_full_prompts": "89a598cdde43be79", |
|
"hash_input_tokens": "895ceec34881fcf5", |
|
"hash_cont_tokens": "caff417b58be4e70" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 151, |
|
"padded": 594, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "d8a7bf3f17ec12d0", |
|
"hash_full_prompts": "d8a7bf3f17ec12d0", |
|
"hash_input_tokens": "3f486cd383cd35b5", |
|
"hash_cont_tokens": "2b6a09b8b17fb0eb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 545, |
|
"padded": 2123, |
|
"non_padded": 57, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_statistics|0": { |
|
"hashes": { |
|
"hash_examples": "995374a6caaa97d6", |
|
"hash_full_prompts": "995374a6caaa97d6", |
|
"hash_input_tokens": "879083119967753d", |
|
"hash_cont_tokens": "0cc2b9ae8dabf046" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 216, |
|
"padded": 859, |
|
"non_padded": 5, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_us_history|0": { |
|
"hashes": { |
|
"hash_examples": "7893e9d07e34cb37", |
|
"hash_full_prompts": "7893e9d07e34cb37", |
|
"hash_input_tokens": "6a24701391e40ffc", |
|
"hash_cont_tokens": "73cfbeaec4b3aeed" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 204, |
|
"padded": 816, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:high_school_world_history|0": { |
|
"hashes": { |
|
"hash_examples": "48879684e37d1716", |
|
"hash_full_prompts": "48879684e37d1716", |
|
"hash_input_tokens": "3079604970b7d448", |
|
"hash_cont_tokens": "f1cb4346d2b1e292" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 237, |
|
"padded": 948, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:human_aging|0": { |
|
"hashes": { |
|
"hash_examples": "afae8c53bd6e5f44", |
|
"hash_full_prompts": "afae8c53bd6e5f44", |
|
"hash_input_tokens": "a1f16b4821195939", |
|
"hash_cont_tokens": "8d13851bb150eaf9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 223, |
|
"padded": 856, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:human_sexuality|0": { |
|
"hashes": { |
|
"hash_examples": "9701f02004912a7a", |
|
"hash_full_prompts": "9701f02004912a7a", |
|
"hash_input_tokens": "832396976eba57e1", |
|
"hash_cont_tokens": "985d7aabfc585133" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 131, |
|
"padded": 507, |
|
"non_padded": 17, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:international_law|0": { |
|
"hashes": { |
|
"hash_examples": "47955196de2d2c7a", |
|
"hash_full_prompts": "47955196de2d2c7a", |
|
"hash_input_tokens": "6466394a11f03388", |
|
"hash_cont_tokens": "9c8c1fa7e47c4c90" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 121, |
|
"padded": 470, |
|
"non_padded": 14, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:jurisprudence|0": { |
|
"hashes": { |
|
"hash_examples": "a992eac2b8ae8bc4", |
|
"hash_full_prompts": "a992eac2b8ae8bc4", |
|
"hash_input_tokens": "51f18657103513e5", |
|
"hash_cont_tokens": "4331097b412f784f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 108, |
|
"padded": 413, |
|
"non_padded": 19, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:logical_fallacies|0": { |
|
"hashes": { |
|
"hash_examples": "b0d31ed08f699e6c", |
|
"hash_full_prompts": "b0d31ed08f699e6c", |
|
"hash_input_tokens": "4cbb4ee3c4329a41", |
|
"hash_cont_tokens": "4743dde06acaab43" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 163, |
|
"padded": 635, |
|
"non_padded": 17, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:machine_learning|0": { |
|
"hashes": { |
|
"hash_examples": "dccdef2bae4461a6", |
|
"hash_full_prompts": "dccdef2bae4461a6", |
|
"hash_input_tokens": "27c5b064b051a030", |
|
"hash_cont_tokens": "1f795e6e8a273ecc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 112, |
|
"padded": 444, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:management|0": { |
|
"hashes": { |
|
"hash_examples": "f600be25303e1fe2", |
|
"hash_full_prompts": "f600be25303e1fe2", |
|
"hash_input_tokens": "90d2cf3e33247cf8", |
|
"hash_cont_tokens": "393a02f5ba8ed8f7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 103, |
|
"padded": 409, |
|
"non_padded": 3, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:marketing|0": { |
|
"hashes": { |
|
"hash_examples": "1a0df3ae5e306669", |
|
"hash_full_prompts": "1a0df3ae5e306669", |
|
"hash_input_tokens": "a6ad9428f2066c2a", |
|
"hash_cont_tokens": "54499c1050a42bd8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 234, |
|
"padded": 907, |
|
"non_padded": 29, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:medical_genetics|0": { |
|
"hashes": { |
|
"hash_examples": "eb87c9cfd9b7c760", |
|
"hash_full_prompts": "eb87c9cfd9b7c760", |
|
"hash_input_tokens": "a2fcd6b69eb20a62", |
|
"hash_cont_tokens": "e81320584373fcab" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 377, |
|
"non_padded": 23, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:miscellaneous|0": { |
|
"hashes": { |
|
"hash_examples": "f88d724036ba03b7", |
|
"hash_full_prompts": "f88d724036ba03b7", |
|
"hash_input_tokens": "9daf4dd3472a7d1a", |
|
"hash_cont_tokens": "aa1fb18c535ba1ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 783, |
|
"padded": 3055, |
|
"non_padded": 77, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:moral_disputes|0": { |
|
"hashes": { |
|
"hash_examples": "79782c0823005e7b", |
|
"hash_full_prompts": "79782c0823005e7b", |
|
"hash_input_tokens": "fbede36d813dc634", |
|
"hash_cont_tokens": "bce3e5cbf2757e8a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 346, |
|
"padded": 1318, |
|
"non_padded": 66, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:moral_scenarios|0": { |
|
"hashes": { |
|
"hash_examples": "fe57fdd86442b483", |
|
"hash_full_prompts": "fe57fdd86442b483", |
|
"hash_input_tokens": "31be0a0533a83d95", |
|
"hash_cont_tokens": "c0b9acae579bdfb6" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:nutrition|0": { |
|
"hashes": { |
|
"hash_examples": "421f206f5957e90f", |
|
"hash_full_prompts": "421f206f5957e90f", |
|
"hash_input_tokens": "84a45e53f623efbb", |
|
"hash_cont_tokens": "5ea5cbf5680055ad" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 306, |
|
"padded": 1183, |
|
"non_padded": 41, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "c93073e383957fc4", |
|
"hash_full_prompts": "c93073e383957fc4", |
|
"hash_input_tokens": "e927bccac541dae4", |
|
"hash_cont_tokens": "3bf84495cb489f2f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 311, |
|
"padded": 1203, |
|
"non_padded": 41, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:prehistory|0": { |
|
"hashes": { |
|
"hash_examples": "58ec03e20eae9f90", |
|
"hash_full_prompts": "58ec03e20eae9f90", |
|
"hash_input_tokens": "5a30ecbbc910a94f", |
|
"hash_cont_tokens": "c9ac7804f9ff842d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 324, |
|
"padded": 1236, |
|
"non_padded": 60, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_accounting|0": { |
|
"hashes": { |
|
"hash_examples": "13f7a6023a118512", |
|
"hash_full_prompts": "13f7a6023a118512", |
|
"hash_input_tokens": "b462d03ce0959e8d", |
|
"hash_cont_tokens": "0d7b8e6abecb4fb3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 282, |
|
"padded": 1122, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_law|0": { |
|
"hashes": { |
|
"hash_examples": "8086d24f4d4e82f4", |
|
"hash_full_prompts": "8086d24f4d4e82f4", |
|
"hash_input_tokens": "1c3e5eae5a47abf1", |
|
"hash_cont_tokens": "c38cfeb8105b5e90" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1534, |
|
"padded": 6118, |
|
"non_padded": 18, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "f0b30b4e786eaeea", |
|
"hash_full_prompts": "f0b30b4e786eaeea", |
|
"hash_input_tokens": "ee6d4d0eb63ed94b", |
|
"hash_cont_tokens": "c23b43a1136d5986" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 1078, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:professional_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "29fd2b4a194c28ea", |
|
"hash_full_prompts": "29fd2b4a194c28ea", |
|
"hash_input_tokens": "860dc95066e1e596", |
|
"hash_cont_tokens": "c8827a0bd1403cb3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2357, |
|
"non_padded": 91, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:public_relations|0": { |
|
"hashes": { |
|
"hash_examples": "52a84bb75dd812eb", |
|
"hash_full_prompts": "52a84bb75dd812eb", |
|
"hash_input_tokens": "38711d53009ea90d", |
|
"hash_cont_tokens": "d224b48b243de3f4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 110, |
|
"padded": 424, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:security_studies|0": { |
|
"hashes": { |
|
"hash_examples": "2469ceb06f350432", |
|
"hash_full_prompts": "2469ceb06f350432", |
|
"hash_input_tokens": "4f35af93520ee61e", |
|
"hash_cont_tokens": "fd2c2443a5d9e7ac" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 245, |
|
"padded": 961, |
|
"non_padded": 19, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:sociology|0": { |
|
"hashes": { |
|
"hash_examples": "d2060dedb3fc2bea", |
|
"hash_full_prompts": "d2060dedb3fc2bea", |
|
"hash_input_tokens": "ed7e51cb983572ad", |
|
"hash_cont_tokens": "acc70513ba8b6ea2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 201, |
|
"padded": 759, |
|
"non_padded": 45, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "84e882e740d43f01", |
|
"hash_full_prompts": "84e882e740d43f01", |
|
"hash_input_tokens": "268956b1012c5940", |
|
"hash_cont_tokens": "45532e5b7e0b55e5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 379, |
|
"non_padded": 21, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:virology|0": { |
|
"hashes": { |
|
"hash_examples": "0428d2d277aa56aa", |
|
"hash_full_prompts": "0428d2d277aa56aa", |
|
"hash_input_tokens": "961409bd21ba248a", |
|
"hash_cont_tokens": "8bc228ef11217e91" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 640, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_cloze:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "7e8f045c67ba6ba1", |
|
"hash_full_prompts": "7e8f045c67ba6ba1", |
|
"hash_input_tokens": "bfe903593483aa22", |
|
"hash_cont_tokens": "bc5b5b53a2341fc3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 665, |
|
"non_padded": 19, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "4c76229e00c9c0e9", |
|
"hash_full_prompts": "4c76229e00c9c0e9", |
|
"hash_input_tokens": "7755491d408f0b5c", |
|
"hash_cont_tokens": "0bb4627988c70881" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 392, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "6a1f8104dccbd33b", |
|
"hash_full_prompts": "6a1f8104dccbd33b", |
|
"hash_input_tokens": "b95ec4c836880ab8", |
|
"hash_cont_tokens": "fbfb34b68f4e80cc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 536, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "1302effa3a76ce4c", |
|
"hash_full_prompts": "1302effa3a76ce4c", |
|
"hash_input_tokens": "0930c5fc835f8fc3", |
|
"hash_cont_tokens": "f94a14f8612b0081" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 604, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "03cb8bce5336419a", |
|
"hash_full_prompts": "03cb8bce5336419a", |
|
"hash_input_tokens": "ee16d2f0799734ef", |
|
"hash_cont_tokens": "9cafa5a029671fcf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "ffbb9c7b2be257f9", |
|
"hash_full_prompts": "ffbb9c7b2be257f9", |
|
"hash_input_tokens": "ac1bf6729bd8feb6", |
|
"hash_cont_tokens": "85c06001ba520401" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1060, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "3ee77f176f38eb8e", |
|
"hash_full_prompts": "3ee77f176f38eb8e", |
|
"hash_input_tokens": "e6334e1ca4b68674", |
|
"hash_cont_tokens": "147d4bf8722e29b8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 568, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "ce61a69c46d47aeb", |
|
"hash_full_prompts": "ce61a69c46d47aeb", |
|
"hash_input_tokens": "579af862a1802539", |
|
"hash_cont_tokens": "43d344a3a511451c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "32805b52d7d5daab", |
|
"hash_full_prompts": "32805b52d7d5daab", |
|
"hash_input_tokens": "55adcbebb7750997", |
|
"hash_cont_tokens": "f57b0117d5493506" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "55da1a0a0bd33722", |
|
"hash_full_prompts": "55da1a0a0bd33722", |
|
"hash_input_tokens": "5a8fa193074b3ca3", |
|
"hash_cont_tokens": "c9742de1a226550f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 392, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "c33e143163049176", |
|
"hash_full_prompts": "c33e143163049176", |
|
"hash_input_tokens": "078ed8c3fa3bc8e9", |
|
"hash_cont_tokens": "16f983144c324c70" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 684, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "ebdab1cdb7e555df", |
|
"hash_full_prompts": "ebdab1cdb7e555df", |
|
"hash_input_tokens": "e156982a768f804a", |
|
"hash_cont_tokens": "775e75c894c6a68a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 408, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "a24fd7d08a560921", |
|
"hash_full_prompts": "a24fd7d08a560921", |
|
"hash_input_tokens": "ab40d5df656b2df9", |
|
"hash_cont_tokens": "7ffc0312d6fa396e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "8300977a79386993", |
|
"hash_full_prompts": "8300977a79386993", |
|
"hash_input_tokens": "a11b8a3dc5ca8626", |
|
"hash_cont_tokens": "bc507e44b2db8146" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 940, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "ddde36788a04a46f", |
|
"hash_full_prompts": "ddde36788a04a46f", |
|
"hash_input_tokens": "086789ac270f0fb8", |
|
"hash_cont_tokens": "7421c15af82190b8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 452, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "acbc5def98c19b3f", |
|
"hash_full_prompts": "acbc5def98c19b3f", |
|
"hash_input_tokens": "f1aeb4190769da1b", |
|
"hash_cont_tokens": "fde6678d78c6175b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "146e61d07497a9bd", |
|
"hash_full_prompts": "146e61d07497a9bd", |
|
"hash_input_tokens": "7f7e3706b777d772", |
|
"hash_cont_tokens": "99b1b1787c6b587a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1480, |
|
"non_padded": 32, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "8635216e1909a03f", |
|
"hash_full_prompts": "8635216e1909a03f", |
|
"hash_input_tokens": "7d278bc74a8975ff", |
|
"hash_cont_tokens": "76f58b417f117854" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 500, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "30b315aa6353ee47", |
|
"hash_full_prompts": "30b315aa6353ee47", |
|
"hash_input_tokens": "94a3f760be5add9e", |
|
"hash_cont_tokens": "57002e98809bf710" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 392, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "c9136373af2180de", |
|
"hash_full_prompts": "c9136373af2180de", |
|
"hash_input_tokens": "1657f58c617fa9c6", |
|
"hash_cont_tokens": "793c1f6093f18378" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 310, |
|
"padded": 1224, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "b0661bfa1add6404", |
|
"hash_full_prompts": "b0661bfa1add6404", |
|
"hash_input_tokens": "48024c3cae4ac8f9", |
|
"hash_cont_tokens": "10a87034ea31a82e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 808, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "80fc1d623a3d665f", |
|
"hash_full_prompts": "80fc1d623a3d665f", |
|
"hash_input_tokens": "898b574f856cb636", |
|
"hash_cont_tokens": "cfb8bbe184073f18" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "854da6e5af0fe1a1", |
|
"hash_full_prompts": "854da6e5af0fe1a1", |
|
"hash_input_tokens": "15ecc775303b9a1d", |
|
"hash_cont_tokens": "fa695ce00a41ce0a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 165, |
|
"padded": 660, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "7dc963c7acd19ad8", |
|
"hash_full_prompts": "7dc963c7acd19ad8", |
|
"hash_input_tokens": "f1edca5b8c432bbf", |
|
"hash_cont_tokens": "8d79b0f69b217745" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 788, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "1f675dcdebc9758f", |
|
"hash_full_prompts": "1f675dcdebc9758f", |
|
"hash_input_tokens": "8881b114a1e4ad12", |
|
"hash_cont_tokens": "8f802c7dc09a6be2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 760, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "2fb32cf2d80f0b35", |
|
"hash_full_prompts": "2fb32cf2d80f0b35", |
|
"hash_input_tokens": "25cb763d830fd806", |
|
"hash_cont_tokens": "8893e7463bbf4b91" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1536, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "fd6646fdb5d58a1f", |
|
"hash_full_prompts": "fd6646fdb5d58a1f", |
|
"hash_input_tokens": "a8906d9e33e0214a", |
|
"hash_cont_tokens": "4868955bace38070" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 270, |
|
"padded": 1076, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_microeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "2118f21f71d87d84", |
|
"hash_full_prompts": "2118f21f71d87d84", |
|
"hash_input_tokens": "ca481d3baad7f289", |
|
"hash_cont_tokens": "e8b823787b710a12" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 948, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_physics|0": { |
|
"hashes": { |
|
"hash_examples": "dc3ce06378548565", |
|
"hash_full_prompts": "dc3ce06378548565", |
|
"hash_input_tokens": "4259b80a7d21ef74", |
|
"hash_cont_tokens": "834f1c074b49339e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 151, |
|
"padded": 600, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "c8d1d98a40e11f2f", |
|
"hash_full_prompts": "c8d1d98a40e11f2f", |
|
"hash_input_tokens": "490ba9909b535d3d", |
|
"hash_cont_tokens": "54161589e46df634" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 545, |
|
"padded": 2152, |
|
"non_padded": 28, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_statistics|0": { |
|
"hashes": { |
|
"hash_examples": "666c8759b98ee4ff", |
|
"hash_full_prompts": "666c8759b98ee4ff", |
|
"hash_input_tokens": "5556bcba1b0565fa", |
|
"hash_cont_tokens": "833eec6f54b0c732" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 216, |
|
"padded": 860, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_us_history|0": { |
|
"hashes": { |
|
"hash_examples": "95fef1c4b7d3f81e", |
|
"hash_full_prompts": "95fef1c4b7d3f81e", |
|
"hash_input_tokens": "b624d98485c9cee2", |
|
"hash_cont_tokens": "2453ef70d8102d77" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 204, |
|
"padded": 816, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:high_school_world_history|0": { |
|
"hashes": { |
|
"hash_examples": "7e5085b6184b0322", |
|
"hash_full_prompts": "7e5085b6184b0322", |
|
"hash_input_tokens": "80915978e077fe68", |
|
"hash_cont_tokens": "0f88d74be6b857ea" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 237, |
|
"padded": 948, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:human_aging|0": { |
|
"hashes": { |
|
"hash_examples": "c17333e7c7c10797", |
|
"hash_full_prompts": "c17333e7c7c10797", |
|
"hash_input_tokens": "8a00ffa15745266e", |
|
"hash_cont_tokens": "20d50fbd57370a6c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 223, |
|
"padded": 892, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:human_sexuality|0": { |
|
"hashes": { |
|
"hash_examples": "4edd1e9045df5e3d", |
|
"hash_full_prompts": "4edd1e9045df5e3d", |
|
"hash_input_tokens": "3e04489c11225f01", |
|
"hash_cont_tokens": "a6a547daf0286e50" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 131, |
|
"padded": 520, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:international_law|0": { |
|
"hashes": { |
|
"hash_examples": "db2fa00d771a062a", |
|
"hash_full_prompts": "db2fa00d771a062a", |
|
"hash_input_tokens": "e8e6b00efbc87643", |
|
"hash_cont_tokens": "b5407c77db7f2f4b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 121, |
|
"padded": 476, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:jurisprudence|0": { |
|
"hashes": { |
|
"hash_examples": "e956f86b124076fe", |
|
"hash_full_prompts": "e956f86b124076fe", |
|
"hash_input_tokens": "55f2b05e593d44f2", |
|
"hash_cont_tokens": "9bf3c173ffe7a067" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 108, |
|
"padded": 424, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:logical_fallacies|0": { |
|
"hashes": { |
|
"hash_examples": "956e0e6365ab79f1", |
|
"hash_full_prompts": "956e0e6365ab79f1", |
|
"hash_input_tokens": "04cc61b11aa11edd", |
|
"hash_cont_tokens": "f0ffa328ffbc1639" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 163, |
|
"padded": 648, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:machine_learning|0": { |
|
"hashes": { |
|
"hash_examples": "397997cc6f4d581e", |
|
"hash_full_prompts": "397997cc6f4d581e", |
|
"hash_input_tokens": "a5c4d754ce7e9176", |
|
"hash_cont_tokens": "8930a57f098fa487" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 112, |
|
"padded": 444, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:management|0": { |
|
"hashes": { |
|
"hash_examples": "2bcbe6f6ca63d740", |
|
"hash_full_prompts": "2bcbe6f6ca63d740", |
|
"hash_input_tokens": "df114f5949470e68", |
|
"hash_cont_tokens": "c6864e99f08c42e2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 103, |
|
"padded": 412, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:marketing|0": { |
|
"hashes": { |
|
"hash_examples": "8ddb20d964a1b065", |
|
"hash_full_prompts": "8ddb20d964a1b065", |
|
"hash_input_tokens": "50d069db0dce34df", |
|
"hash_cont_tokens": "0ee3c521bbd24c81" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 234, |
|
"padded": 932, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:medical_genetics|0": { |
|
"hashes": { |
|
"hash_examples": "182a71f4763d2cea", |
|
"hash_full_prompts": "182a71f4763d2cea", |
|
"hash_input_tokens": "4a40133ad3a15075", |
|
"hash_cont_tokens": "57002e98809bf710" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:miscellaneous|0": { |
|
"hashes": { |
|
"hash_examples": "4c404fdbb4ca57fc", |
|
"hash_full_prompts": "4c404fdbb4ca57fc", |
|
"hash_input_tokens": "adec3091ce5943d1", |
|
"hash_cont_tokens": "33fa59579b01dcf7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 783, |
|
"padded": 3124, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:moral_disputes|0": { |
|
"hashes": { |
|
"hash_examples": "60cbd2baa3fea5c9", |
|
"hash_full_prompts": "60cbd2baa3fea5c9", |
|
"hash_input_tokens": "275a765273325318", |
|
"hash_cont_tokens": "aeb2e3df581375d2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 346, |
|
"padded": 1356, |
|
"non_padded": 28, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:moral_scenarios|0": { |
|
"hashes": { |
|
"hash_examples": "fd8b0431fbdd75ef", |
|
"hash_full_prompts": "fd8b0431fbdd75ef", |
|
"hash_input_tokens": "dbbd61f7dcb34df7", |
|
"hash_cont_tokens": "f97e5f05a6fab216" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:nutrition|0": { |
|
"hashes": { |
|
"hash_examples": "71e55e2b829b6528", |
|
"hash_full_prompts": "71e55e2b829b6528", |
|
"hash_input_tokens": "1d592d46420a7af8", |
|
"hash_cont_tokens": "0cff076b60b939b9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 306, |
|
"padded": 1220, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "a6d489a8d208fa4b", |
|
"hash_full_prompts": "a6d489a8d208fa4b", |
|
"hash_input_tokens": "a09ce450f5ef1e7a", |
|
"hash_cont_tokens": "fa9d6a93a12be228" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 311, |
|
"padded": 1244, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:prehistory|0": { |
|
"hashes": { |
|
"hash_examples": "6cc50f032a19acaa", |
|
"hash_full_prompts": "6cc50f032a19acaa", |
|
"hash_input_tokens": "4c8a25175d952d1a", |
|
"hash_cont_tokens": "2551df3b83d95c91" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 324, |
|
"padded": 1288, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:professional_accounting|0": { |
|
"hashes": { |
|
"hash_examples": "50f57ab32f5f6cea", |
|
"hash_full_prompts": "50f57ab32f5f6cea", |
|
"hash_input_tokens": "39d5857440021ead", |
|
"hash_cont_tokens": "8dfb834ed69b9a63" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 282, |
|
"padded": 1108, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:professional_law|0": { |
|
"hashes": { |
|
"hash_examples": "a8fdc85c64f4b215", |
|
"hash_full_prompts": "a8fdc85c64f4b215", |
|
"hash_input_tokens": "58b944eb0581064f", |
|
"hash_cont_tokens": "90aa19208cf48371" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1534, |
|
"padded": 6132, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:professional_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "c373a28a3050a73a", |
|
"hash_full_prompts": "c373a28a3050a73a", |
|
"hash_input_tokens": "03999f2b4bc3d63c", |
|
"hash_cont_tokens": "87192b5d770945cb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 1088, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:professional_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "bf5254fe818356af", |
|
"hash_full_prompts": "bf5254fe818356af", |
|
"hash_input_tokens": "c97b675eaa9b4bf4", |
|
"hash_cont_tokens": "75c67c831f21f75b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2416, |
|
"non_padded": 32, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:public_relations|0": { |
|
"hashes": { |
|
"hash_examples": "b66d52e28e7d14e0", |
|
"hash_full_prompts": "b66d52e28e7d14e0", |
|
"hash_input_tokens": "35c5d7600493ff13", |
|
"hash_cont_tokens": "500c34e39ca8a57d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 110, |
|
"padded": 440, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:security_studies|0": { |
|
"hashes": { |
|
"hash_examples": "514c14feaf000ad9", |
|
"hash_full_prompts": "514c14feaf000ad9", |
|
"hash_input_tokens": "bce1c31864d3a9d1", |
|
"hash_cont_tokens": "f4787430091b26e1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 245, |
|
"padded": 980, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:sociology|0": { |
|
"hashes": { |
|
"hash_examples": "f6c9bc9d18c80870", |
|
"hash_full_prompts": "f6c9bc9d18c80870", |
|
"hash_input_tokens": "10dd0c8cfe298b5f", |
|
"hash_cont_tokens": "d2e9915d64a35e7a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 201, |
|
"padded": 796, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "ed7b78629db6678f", |
|
"hash_full_prompts": "ed7b78629db6678f", |
|
"hash_input_tokens": "f2fb51e41e4463e9", |
|
"hash_cont_tokens": "1a7af3ca7869de2e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 392, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:virology|0": { |
|
"hashes": { |
|
"hash_examples": "bc52ffdc3f9b994a", |
|
"hash_full_prompts": "bc52ffdc3f9b994a", |
|
"hash_input_tokens": "82148f4342caa66d", |
|
"hash_cont_tokens": "0be66455983a917e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 664, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_mc:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "ecdb4a4f94f62930", |
|
"hash_full_prompts": "ecdb4a4f94f62930", |
|
"hash_input_tokens": "f9d6bc2fb36855ea", |
|
"hash_cont_tokens": "584bbaf1621cc1f9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 684, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|mmlu_pro_cloze|0": { |
|
"hashes": { |
|
"hash_examples": "845e15cfeee1fc11", |
|
"hash_full_prompts": "845e15cfeee1fc11", |
|
"hash_input_tokens": "dbc4fdb4c171fc52", |
|
"hash_cont_tokens": "72e731df47f7ef00" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 12032, |
|
"padded": 111558, |
|
"non_padded": 2437, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|openbookqa|0": { |
|
"hashes": { |
|
"hash_examples": "fd427af2ef0577e3", |
|
"hash_full_prompts": "fd427af2ef0577e3", |
|
"hash_input_tokens": "924e3881a38e2c6c", |
|
"hash_cont_tokens": "30d1cae5d2df9faa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 500, |
|
"padded": 1994, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|piqa|0": { |
|
"hashes": { |
|
"hash_examples": "f7e288a8894cd149", |
|
"hash_full_prompts": "f7e288a8894cd149", |
|
"hash_input_tokens": "2a53824cfb6e393d", |
|
"hash_cont_tokens": "89c61f01a2e4acbb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1838, |
|
"padded": 3602, |
|
"non_padded": 74, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|siqa|0": { |
|
"hashes": { |
|
"hash_examples": "c62abc8ecbd49cc4", |
|
"hash_full_prompts": "c62abc8ecbd49cc4", |
|
"hash_input_tokens": "2eefab74aa9434f9", |
|
"hash_cont_tokens": "2beff714f7e6bb8b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1954, |
|
"padded": 5782, |
|
"non_padded": 80, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|winogrande|0": { |
|
"hashes": { |
|
"hash_examples": "087d5d1a1afd4c7b", |
|
"hash_full_prompts": "087d5d1a1afd4c7b", |
|
"hash_input_tokens": "17ac595c26748c2b", |
|
"hash_cont_tokens": "d9811a5ce62e0917" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1267, |
|
"padded": 2532, |
|
"non_padded": 2, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|gsm8k|5": { |
|
"hashes": { |
|
"hash_examples": "0ed016e24e7512fd", |
|
"hash_full_prompts": "41d55e83abc0e02d", |
|
"hash_input_tokens": "4c698c7148ab9bfc", |
|
"hash_cont_tokens": "2b030b835fe3df09" |
|
}, |
|
"truncated": 1319, |
|
"non_truncated": 0, |
|
"padded": 948, |
|
"non_padded": 371, |
|
"effective_few_shots": 5.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"custom|trivia_qa|0": { |
|
"hashes": { |
|
"hash_examples": "1e083041cb75ff0c", |
|
"hash_full_prompts": "1e083041cb75ff0c", |
|
"hash_input_tokens": "01d7706e0dac8380", |
|
"hash_cont_tokens": "9160ed2743838666" |
|
}, |
|
"truncated": 17944, |
|
"non_truncated": 0, |
|
"padded": 269, |
|
"non_padded": 17675, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
} |
|
}, |
|
"summary_general": { |
|
"hashes": { |
|
"hash_examples": "88e809e91c59a89c", |
|
"hash_full_prompts": "60e948f5cd4b4b2e", |
|
"hash_input_tokens": "c4e85a9af5c2ad3f", |
|
"hash_cont_tokens": "788366827136aa8c" |
|
}, |
|
"truncated": 19263, |
|
"non_truncated": 60486, |
|
"padded": 297962, |
|
"non_padded": 22165, |
|
"num_truncated_few_shots": 0 |
|
} |
|
} |