{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 8,
"max_samples": null,
"job_id": "",
"start_time": 2004961.140231565,
"end_time": 2005992.444841733,
"total_evaluation_time_secondes": "1031.3046101678628",
"model_name": "Qwen/Qwen2.5-0.5B",
"model_sha": "060db6499f32faf8b98477b0a26969ef7d8b9987",
"model_dtype": "torch.bfloat16",
"model_size": "942.3 MB",
"config": null
},
"results": {
"custom|arc:challenge|0": {
"acc": 0.2935153583617747,
"acc_stderr": 0.013307250444941122,
"acc_norm": 0.32081911262798635,
"acc_norm_stderr": 0.013640943091946528
},
"custom|arc:easy|0": {
"acc": 0.6430976430976431,
"acc_stderr": 0.009830630210347016,
"acc_norm": 0.5808080808080808,
"acc_norm_stderr": 0.01012490528249118
},
"custom|commonsense_qa|0": {
"acc": 0.29238329238329236,
"acc_stderr": 0.013022531002213358,
"acc_norm": 0.3153153153153153,
"acc_norm_stderr": 0.013302642632905037
},
"custom|hellaswag|0": {
"acc": 0.4025094602668791,
"acc_stderr": 0.004894012555642639,
"acc_norm": 0.5126468830910177,
"acc_norm_stderr": 0.004988184988345287
},
"custom|mmlu_cloze:abstract_algebra|0": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"custom|mmlu_cloze:anatomy|0": {
"acc": 0.34814814814814815,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.041153246103369526
},
"custom|mmlu_cloze:astronomy|0": {
"acc": 0.3026315789473684,
"acc_stderr": 0.037385206761196686,
"acc_norm": 0.39473684210526316,
"acc_norm_stderr": 0.039777499346220734
},
"custom|mmlu_cloze:business_ethics|0": {
"acc": 0.58,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"custom|mmlu_cloze:clinical_knowledge|0": {
"acc": 0.27169811320754716,
"acc_stderr": 0.02737770662467071,
"acc_norm": 0.3886792452830189,
"acc_norm_stderr": 0.03000048544867599
},
"custom|mmlu_cloze:college_biology|0": {
"acc": 0.3125,
"acc_stderr": 0.038760854559127644,
"acc_norm": 0.3680555555555556,
"acc_norm_stderr": 0.0403299905396072
},
"custom|mmlu_cloze:college_chemistry|0": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"custom|mmlu_cloze:college_computer_science|0": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"custom|mmlu_cloze:college_mathematics|0": {
"acc": 0.17,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165085
},
"custom|mmlu_cloze:college_medicine|0": {
"acc": 0.31213872832369943,
"acc_stderr": 0.03533133389323657,
"acc_norm": 0.30057803468208094,
"acc_norm_stderr": 0.0349610148119118
},
"custom|mmlu_cloze:college_physics|0": {
"acc": 0.2647058823529412,
"acc_stderr": 0.04389869956808778,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.043364327079931785
},
"custom|mmlu_cloze:computer_security|0": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"custom|mmlu_cloze:conceptual_physics|0": {
"acc": 0.425531914893617,
"acc_stderr": 0.03232146916224469,
"acc_norm": 0.37872340425531914,
"acc_norm_stderr": 0.03170995606040655
},
"custom|mmlu_cloze:econometrics|0": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03947152782669415,
"acc_norm": 0.21929824561403508,
"acc_norm_stderr": 0.03892431106518756
},
"custom|mmlu_cloze:electrical_engineering|0": {
"acc": 0.3103448275862069,
"acc_stderr": 0.03855289616378948,
"acc_norm": 0.31724137931034485,
"acc_norm_stderr": 0.03878352372138622
},
"custom|mmlu_cloze:elementary_mathematics|0": {
"acc": 0.4470899470899471,
"acc_stderr": 0.02560672399577703,
"acc_norm": 0.4365079365079365,
"acc_norm_stderr": 0.025542846817400496
},
"custom|mmlu_cloze:formal_logic|0": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303317,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04285714285714281
},
"custom|mmlu_cloze:global_facts|0": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621505
},
"custom|mmlu_cloze:high_school_biology|0": {
"acc": 0.3193548387096774,
"acc_stderr": 0.026522709674667775,
"acc_norm": 0.36129032258064514,
"acc_norm_stderr": 0.027327548447957543
},
"custom|mmlu_cloze:high_school_chemistry|0": {
"acc": 0.2019704433497537,
"acc_stderr": 0.02824735012218027,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.03178529710642749
},
"custom|mmlu_cloze:high_school_computer_science|0": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"custom|mmlu_cloze:high_school_european_history|0": {
"acc": 0.28484848484848485,
"acc_stderr": 0.035243908445117836,
"acc_norm": 0.41818181818181815,
"acc_norm_stderr": 0.03851716319398394
},
"custom|mmlu_cloze:high_school_geography|0": {
"acc": 0.3686868686868687,
"acc_stderr": 0.034373055019806184,
"acc_norm": 0.3484848484848485,
"acc_norm_stderr": 0.033948539651564025
},
"custom|mmlu_cloze:high_school_government_and_politics|0": {
"acc": 0.38341968911917096,
"acc_stderr": 0.03508984236295342,
"acc_norm": 0.37823834196891193,
"acc_norm_stderr": 0.03499807276193337
},
"custom|mmlu_cloze:high_school_macroeconomics|0": {
"acc": 0.3153846153846154,
"acc_stderr": 0.02355964698318995,
"acc_norm": 0.36153846153846153,
"acc_norm_stderr": 0.02435958146539699
},
"custom|mmlu_cloze:high_school_mathematics|0": {
"acc": 0.1962962962962963,
"acc_stderr": 0.024217421327417145,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.026962424325073838
},
"custom|mmlu_cloze:high_school_microeconomics|0": {
"acc": 0.3403361344537815,
"acc_stderr": 0.030778057422931673,
"acc_norm": 0.40336134453781514,
"acc_norm_stderr": 0.031866081214088314
},
"custom|mmlu_cloze:high_school_physics|0": {
"acc": 0.2781456953642384,
"acc_stderr": 0.03658603262763743,
"acc_norm": 0.26490066225165565,
"acc_norm_stderr": 0.03603038545360385
},
"custom|mmlu_cloze:high_school_psychology|0": {
"acc": 0.44954128440366975,
"acc_stderr": 0.021327881417823373,
"acc_norm": 0.44770642201834865,
"acc_norm_stderr": 0.021319754962425462
},
"custom|mmlu_cloze:high_school_statistics|0": {
"acc": 0.28703703703703703,
"acc_stderr": 0.03085199299325701,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03214952147802747
},
"custom|mmlu_cloze:high_school_us_history|0": {
"acc": 0.27450980392156865,
"acc_stderr": 0.0313217980308329,
"acc_norm": 0.3284313725490196,
"acc_norm_stderr": 0.03296245110172228
},
"custom|mmlu_cloze:high_school_world_history|0": {
"acc": 0.3037974683544304,
"acc_stderr": 0.029936696387138608,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03068582059661081
},
"custom|mmlu_cloze:human_aging|0": {
"acc": 0.3991031390134529,
"acc_stderr": 0.032867453125679603,
"acc_norm": 0.33183856502242154,
"acc_norm_stderr": 0.03160295143776679
},
"custom|mmlu_cloze:human_sexuality|0": {
"acc": 0.3893129770992366,
"acc_stderr": 0.04276486542814591,
"acc_norm": 0.35877862595419846,
"acc_norm_stderr": 0.04206739313864908
},
"custom|mmlu_cloze:international_law|0": {
"acc": 0.17355371900826447,
"acc_stderr": 0.0345727283691767,
"acc_norm": 0.30578512396694213,
"acc_norm_stderr": 0.04205953933884123
},
"custom|mmlu_cloze:jurisprudence|0": {
"acc": 0.23148148148148148,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.4351851851851852,
"acc_norm_stderr": 0.04792898170907062
},
"custom|mmlu_cloze:logical_fallacies|0": {
"acc": 0.294478527607362,
"acc_stderr": 0.03581165790474082,
"acc_norm": 0.3312883435582822,
"acc_norm_stderr": 0.03697983910025588
},
"custom|mmlu_cloze:machine_learning|0": {
"acc": 0.23214285714285715,
"acc_stderr": 0.04007341809755805,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04109974682633932
},
"custom|mmlu_cloze:management|0": {
"acc": 0.3300970873786408,
"acc_stderr": 0.0465614711001235,
"acc_norm": 0.44660194174757284,
"acc_norm_stderr": 0.04922424153458933
},
"custom|mmlu_cloze:marketing|0": {
"acc": 0.47863247863247865,
"acc_stderr": 0.03272616447634954,
"acc_norm": 0.47435897435897434,
"acc_norm_stderr": 0.03271298896811159
},
"custom|mmlu_cloze:medical_genetics|0": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"custom|mmlu_cloze:miscellaneous|0": {
"acc": 0.46998722860791825,
"acc_stderr": 0.017847723086649097,
"acc_norm": 0.42656449553001274,
"acc_norm_stderr": 0.01768606697567565
},
"custom|mmlu_cloze:moral_disputes|0": {
"acc": 0.2774566473988439,
"acc_stderr": 0.024105712607754307,
"acc_norm": 0.2398843930635838,
"acc_norm_stderr": 0.022989592543123567
},
"custom|mmlu_cloze:moral_scenarios|0": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"custom|mmlu_cloze:nutrition|0": {
"acc": 0.25163398692810457,
"acc_stderr": 0.0248480182638752,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.02736359328468493
},
"custom|mmlu_cloze:philosophy|0": {
"acc": 0.2797427652733119,
"acc_stderr": 0.025494259350694888,
"acc_norm": 0.3183279742765273,
"acc_norm_stderr": 0.026457225067811025
},
"custom|mmlu_cloze:prehistory|0": {
"acc": 0.3611111111111111,
"acc_stderr": 0.02672586880910079,
"acc_norm": 0.31790123456790126,
"acc_norm_stderr": 0.025910063528240868
},
"custom|mmlu_cloze:professional_accounting|0": {
"acc": 0.26595744680851063,
"acc_stderr": 0.02635806569888059,
"acc_norm": 0.24468085106382978,
"acc_norm_stderr": 0.025645553622266722
},
"custom|mmlu_cloze:professional_law|0": {
"acc": 0.2405475880052151,
"acc_stderr": 0.010916406735478949,
"acc_norm": 0.2653194263363755,
"acc_norm_stderr": 0.011276198843958876
},
"custom|mmlu_cloze:professional_medicine|0": {
"acc": 0.27941176470588236,
"acc_stderr": 0.02725720260611495,
"acc_norm": 0.3272058823529412,
"acc_norm_stderr": 0.02850145286039657
},
"custom|mmlu_cloze:professional_psychology|0": {
"acc": 0.3022875816993464,
"acc_stderr": 0.018579232711113877,
"acc_norm": 0.3104575163398693,
"acc_norm_stderr": 0.018718067052623227
},
"custom|mmlu_cloze:public_relations|0": {
"acc": 0.44545454545454544,
"acc_stderr": 0.047605488214603246,
"acc_norm": 0.3090909090909091,
"acc_norm_stderr": 0.044262946482000985
},
"custom|mmlu_cloze:security_studies|0": {
"acc": 0.3306122448979592,
"acc_stderr": 0.030116426296540613,
"acc_norm": 0.24081632653061225,
"acc_norm_stderr": 0.027372942201788174
},
"custom|mmlu_cloze:sociology|0": {
"acc": 0.2835820895522388,
"acc_stderr": 0.03187187537919796,
"acc_norm": 0.3482587064676617,
"acc_norm_stderr": 0.033687874661154596
},
"custom|mmlu_cloze:us_foreign_policy|0": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"custom|mmlu_cloze:virology|0": {
"acc": 0.26506024096385544,
"acc_stderr": 0.03436024037944967,
"acc_norm": 0.3072289156626506,
"acc_norm_stderr": 0.035915667978246635
},
"custom|mmlu_cloze:world_religions|0": {
"acc": 0.3742690058479532,
"acc_stderr": 0.03711601185389481,
"acc_norm": 0.4327485380116959,
"acc_norm_stderr": 0.03799978644370608
},
"custom|mmlu_mc:abstract_algebra|0": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"custom|mmlu_mc:anatomy|0": {
"acc": 0.4148148148148148,
"acc_stderr": 0.042561937679014075,
"acc_norm": 0.4148148148148148,
"acc_norm_stderr": 0.042561937679014075
},
"custom|mmlu_mc:astronomy|0": {
"acc": 0.48026315789473684,
"acc_stderr": 0.040657710025626036,
"acc_norm": 0.48026315789473684,
"acc_norm_stderr": 0.040657710025626036
},
"custom|mmlu_mc:business_ethics|0": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"custom|mmlu_mc:clinical_knowledge|0": {
"acc": 0.5358490566037736,
"acc_stderr": 0.030693675018458003,
"acc_norm": 0.5358490566037736,
"acc_norm_stderr": 0.030693675018458003
},
"custom|mmlu_mc:college_biology|0": {
"acc": 0.4652777777777778,
"acc_stderr": 0.04171115858181618,
"acc_norm": 0.4652777777777778,
"acc_norm_stderr": 0.04171115858181618
},
"custom|mmlu_mc:college_chemistry|0": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"custom|mmlu_mc:college_computer_science|0": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"custom|mmlu_mc:college_mathematics|0": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"custom|mmlu_mc:college_medicine|0": {
"acc": 0.48554913294797686,
"acc_stderr": 0.03810871630454764,
"acc_norm": 0.48554913294797686,
"acc_norm_stderr": 0.03810871630454764
},
"custom|mmlu_mc:college_physics|0": {
"acc": 0.27450980392156865,
"acc_stderr": 0.044405219061793275,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.044405219061793275
},
"custom|mmlu_mc:computer_security|0": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"custom|mmlu_mc:conceptual_physics|0": {
"acc": 0.4127659574468085,
"acc_stderr": 0.03218471141400351,
"acc_norm": 0.4127659574468085,
"acc_norm_stderr": 0.03218471141400351
},
"custom|mmlu_mc:econometrics|0": {
"acc": 0.3508771929824561,
"acc_stderr": 0.04489539350270701,
"acc_norm": 0.3508771929824561,
"acc_norm_stderr": 0.04489539350270701
},
"custom|mmlu_mc:electrical_engineering|0": {
"acc": 0.5379310344827586,
"acc_stderr": 0.041546596717075474,
"acc_norm": 0.5379310344827586,
"acc_norm_stderr": 0.041546596717075474
},
"custom|mmlu_mc:elementary_mathematics|0": {
"acc": 0.3783068783068783,
"acc_stderr": 0.024976954053155243,
"acc_norm": 0.3783068783068783,
"acc_norm_stderr": 0.024976954053155243
},
"custom|mmlu_mc:formal_logic|0": {
"acc": 0.35714285714285715,
"acc_stderr": 0.04285714285714281,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04285714285714281
},
"custom|mmlu_mc:global_facts|0": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"custom|mmlu_mc:high_school_biology|0": {
"acc": 0.5258064516129032,
"acc_stderr": 0.02840609505765332,
"acc_norm": 0.5258064516129032,
"acc_norm_stderr": 0.02840609505765332
},
"custom|mmlu_mc:high_school_chemistry|0": {
"acc": 0.43349753694581283,
"acc_stderr": 0.034867317274198714,
"acc_norm": 0.43349753694581283,
"acc_norm_stderr": 0.034867317274198714
},
"custom|mmlu_mc:high_school_computer_science|0": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"custom|mmlu_mc:high_school_european_history|0": {
"acc": 0.593939393939394,
"acc_stderr": 0.03834816355401181,
"acc_norm": 0.593939393939394,
"acc_norm_stderr": 0.03834816355401181
},
"custom|mmlu_mc:high_school_geography|0": {
"acc": 0.5757575757575758,
"acc_stderr": 0.03521224908841586,
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.03521224908841586
},
"custom|mmlu_mc:high_school_government_and_politics|0": {
"acc": 0.5699481865284974,
"acc_stderr": 0.03572954333144809,
"acc_norm": 0.5699481865284974,
"acc_norm_stderr": 0.03572954333144809
},
"custom|mmlu_mc:high_school_macroeconomics|0": {
"acc": 0.47435897435897434,
"acc_stderr": 0.025317649726448652,
"acc_norm": 0.47435897435897434,
"acc_norm_stderr": 0.025317649726448652
},
"custom|mmlu_mc:high_school_mathematics|0": {
"acc": 0.35185185185185186,
"acc_stderr": 0.02911661760608302,
"acc_norm": 0.35185185185185186,
"acc_norm_stderr": 0.02911661760608302
},
"custom|mmlu_mc:high_school_microeconomics|0": {
"acc": 0.5126050420168067,
"acc_stderr": 0.032468167657521745,
"acc_norm": 0.5126050420168067,
"acc_norm_stderr": 0.032468167657521745
},
"custom|mmlu_mc:high_school_physics|0": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"custom|mmlu_mc:high_school_psychology|0": {
"acc": 0.6477064220183486,
"acc_stderr": 0.02048056884399899,
"acc_norm": 0.6477064220183486,
"acc_norm_stderr": 0.02048056884399899
},
"custom|mmlu_mc:high_school_statistics|0": {
"acc": 0.37037037037037035,
"acc_stderr": 0.03293377139415191,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.03293377139415191
},
"custom|mmlu_mc:high_school_us_history|0": {
"acc": 0.5245098039215687,
"acc_stderr": 0.03505093194348798,
"acc_norm": 0.5245098039215687,
"acc_norm_stderr": 0.03505093194348798
},
"custom|mmlu_mc:high_school_world_history|0": {
"acc": 0.6118143459915611,
"acc_stderr": 0.03172295004332328,
"acc_norm": 0.6118143459915611,
"acc_norm_stderr": 0.03172295004332328
},
"custom|mmlu_mc:human_aging|0": {
"acc": 0.5201793721973094,
"acc_stderr": 0.033530461674123005,
"acc_norm": 0.5201793721973094,
"acc_norm_stderr": 0.033530461674123005
},
"custom|mmlu_mc:human_sexuality|0": {
"acc": 0.5725190839694656,
"acc_stderr": 0.04338920305792401,
"acc_norm": 0.5725190839694656,
"acc_norm_stderr": 0.04338920305792401
},
"custom|mmlu_mc:international_law|0": {
"acc": 0.71900826446281,
"acc_stderr": 0.04103203830514511,
"acc_norm": 0.71900826446281,
"acc_norm_stderr": 0.04103203830514511
},
"custom|mmlu_mc:jurisprudence|0": {
"acc": 0.6018518518518519,
"acc_stderr": 0.04732332615978814,
"acc_norm": 0.6018518518518519,
"acc_norm_stderr": 0.04732332615978814
},
"custom|mmlu_mc:logical_fallacies|0": {
"acc": 0.49693251533742333,
"acc_stderr": 0.03928297078179662,
"acc_norm": 0.49693251533742333,
"acc_norm_stderr": 0.03928297078179662
},
"custom|mmlu_mc:machine_learning|0": {
"acc": 0.4017857142857143,
"acc_stderr": 0.04653333146973647,
"acc_norm": 0.4017857142857143,
"acc_norm_stderr": 0.04653333146973647
},
"custom|mmlu_mc:management|0": {
"acc": 0.6310679611650486,
"acc_stderr": 0.0477761518115674,
"acc_norm": 0.6310679611650486,
"acc_norm_stderr": 0.0477761518115674
},
"custom|mmlu_mc:marketing|0": {
"acc": 0.7564102564102564,
"acc_stderr": 0.028120966503914394,
"acc_norm": 0.7564102564102564,
"acc_norm_stderr": 0.028120966503914394
},
"custom|mmlu_mc:medical_genetics|0": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"custom|mmlu_mc:miscellaneous|0": {
"acc": 0.5530012771392082,
"acc_stderr": 0.01777922523339421,
"acc_norm": 0.5530012771392082,
"acc_norm_stderr": 0.01777922523339421
},
"custom|mmlu_mc:moral_disputes|0": {
"acc": 0.5,
"acc_stderr": 0.026919095102908273,
"acc_norm": 0.5,
"acc_norm_stderr": 0.026919095102908273
},
"custom|mmlu_mc:moral_scenarios|0": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"custom|mmlu_mc:nutrition|0": {
"acc": 0.6143790849673203,
"acc_stderr": 0.02787074527829027,
"acc_norm": 0.6143790849673203,
"acc_norm_stderr": 0.02787074527829027
},
"custom|mmlu_mc:philosophy|0": {
"acc": 0.4855305466237942,
"acc_stderr": 0.02838619808417768,
"acc_norm": 0.4855305466237942,
"acc_norm_stderr": 0.02838619808417768
},
"custom|mmlu_mc:prehistory|0": {
"acc": 0.5123456790123457,
"acc_stderr": 0.027812262269327235,
"acc_norm": 0.5123456790123457,
"acc_norm_stderr": 0.027812262269327235
},
"custom|mmlu_mc:professional_accounting|0": {
"acc": 0.36524822695035464,
"acc_stderr": 0.02872386385328128,
"acc_norm": 0.36524822695035464,
"acc_norm_stderr": 0.02872386385328128
},
"custom|mmlu_mc:professional_law|0": {
"acc": 0.34419817470664926,
"acc_stderr": 0.012134433741002574,
"acc_norm": 0.34419817470664926,
"acc_norm_stderr": 0.012134433741002574
},
"custom|mmlu_mc:professional_medicine|0": {
"acc": 0.43014705882352944,
"acc_stderr": 0.030074971917302875,
"acc_norm": 0.43014705882352944,
"acc_norm_stderr": 0.030074971917302875
},
"custom|mmlu_mc:professional_psychology|0": {
"acc": 0.4591503267973856,
"acc_stderr": 0.020160213617222516,
"acc_norm": 0.4591503267973856,
"acc_norm_stderr": 0.020160213617222516
},
"custom|mmlu_mc:public_relations|0": {
"acc": 0.5909090909090909,
"acc_stderr": 0.04709306978661895,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.04709306978661895
},
"custom|mmlu_mc:security_studies|0": {
"acc": 0.6163265306122448,
"acc_stderr": 0.03113088039623593,
"acc_norm": 0.6163265306122448,
"acc_norm_stderr": 0.03113088039623593
},
"custom|mmlu_mc:sociology|0": {
"acc": 0.6965174129353234,
"acc_stderr": 0.03251006816458619,
"acc_norm": 0.6965174129353234,
"acc_norm_stderr": 0.03251006816458619
},
"custom|mmlu_mc:us_foreign_policy|0": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"custom|mmlu_mc:virology|0": {
"acc": 0.45180722891566266,
"acc_stderr": 0.038743715565879536,
"acc_norm": 0.45180722891566266,
"acc_norm_stderr": 0.038743715565879536
},
"custom|mmlu_mc:world_religions|0": {
"acc": 0.5906432748538012,
"acc_stderr": 0.03771283107626545,
"acc_norm": 0.5906432748538012,
"acc_norm_stderr": 0.03771283107626545
},
"custom|mmlu_pro_cloze|0": {
"acc": 0.11269946808510638,
"acc_stderr": 0.0028830052993390953,
"acc_norm": 0.10347406914893617,
"acc_norm_stderr": 0.002776811047097138
},
"custom|openbookqa|0": {
"acc": 0.224,
"acc_stderr": 0.018663994464710797,
"acc_norm": 0.378,
"acc_norm_stderr": 0.021706550824518184
},
"custom|piqa|0": {
"acc": 0.7034820457018498,
"acc_stderr": 0.010656078922661148,
"acc_norm": 0.6980413492927094,
"acc_norm_stderr": 0.010711732891588341
},
"custom|siqa|0": {
"acc": 0.40276356192425794,
"acc_stderr": 0.011098061143371352,
"acc_norm": 0.4329580348004094,
"acc_norm_stderr": 0.011211904262208605
},
"custom|winogrande|0": {
"acc": 0.5461720599842147,
"acc_stderr": 0.013992441563707063,
"acc_norm": 0.5374901341752171,
"acc_norm_stderr": 0.014012928183336574
},
"custom|gsm8k|5": {
"qem": 0.34420015163002277,
"qem_stderr": 0.013086800426693784
},
"custom|trivia_qa|0": {
"qem": 0.04358002674988854,
"qem_stderr": 0.0015241248224145741
},
"custom|arc:_average|0": {
"acc": 0.4683065007297089,
"acc_stderr": 0.011568940327644068,
"acc_norm": 0.45081359671803356,
"acc_norm_stderr": 0.011882924187218855
},
"custom|mmlu_cloze:_average|0": {
"acc": 0.31630907240132794,
"acc_stderr": 0.03422335974354886,
"acc_norm": 0.3374225197619436,
"acc_norm_stderr": 0.034973693369716986
},
"custom|mmlu_mc:_average|0": {
"acc": 0.4896716170891279,
"acc_stderr": 0.03621668643274436,
"acc_norm": 0.4896716170891279,
"acc_norm_stderr": 0.03621668643274436
},
"all": {
"acc": 0.402939204802935,
"acc_stderr": 0.03344252550939551,
"acc_norm": 0.4148286079656159,
"acc_norm_stderr": 0.03382380692637996,
"qem": 0.19389008918995565,
"qem_stderr": 0.007305462624554179
}
},
"versions": {
"custom|arc:challenge|0": 0,
"custom|arc:easy|0": 0,
"custom|commonsense_qa|0": 0,
"custom|gsm8k|5": 0,
"custom|hellaswag|0": 0,
"custom|mmlu_cloze:abstract_algebra|0": 0,
"custom|mmlu_cloze:anatomy|0": 0,
"custom|mmlu_cloze:astronomy|0": 0,
"custom|mmlu_cloze:business_ethics|0": 0,
"custom|mmlu_cloze:clinical_knowledge|0": 0,
"custom|mmlu_cloze:college_biology|0": 0,
"custom|mmlu_cloze:college_chemistry|0": 0,
"custom|mmlu_cloze:college_computer_science|0": 0,
"custom|mmlu_cloze:college_mathematics|0": 0,
"custom|mmlu_cloze:college_medicine|0": 0,
"custom|mmlu_cloze:college_physics|0": 0,
"custom|mmlu_cloze:computer_security|0": 0,
"custom|mmlu_cloze:conceptual_physics|0": 0,
"custom|mmlu_cloze:econometrics|0": 0,
"custom|mmlu_cloze:electrical_engineering|0": 0,
"custom|mmlu_cloze:elementary_mathematics|0": 0,
"custom|mmlu_cloze:formal_logic|0": 0,
"custom|mmlu_cloze:global_facts|0": 0,
"custom|mmlu_cloze:high_school_biology|0": 0,
"custom|mmlu_cloze:high_school_chemistry|0": 0,
"custom|mmlu_cloze:high_school_computer_science|0": 0,
"custom|mmlu_cloze:high_school_european_history|0": 0,
"custom|mmlu_cloze:high_school_geography|0": 0,
"custom|mmlu_cloze:high_school_government_and_politics|0": 0,
"custom|mmlu_cloze:high_school_macroeconomics|0": 0,
"custom|mmlu_cloze:high_school_mathematics|0": 0,
"custom|mmlu_cloze:high_school_microeconomics|0": 0,
"custom|mmlu_cloze:high_school_physics|0": 0,
"custom|mmlu_cloze:high_school_psychology|0": 0,
"custom|mmlu_cloze:high_school_statistics|0": 0,
"custom|mmlu_cloze:high_school_us_history|0": 0,
"custom|mmlu_cloze:high_school_world_history|0": 0,
"custom|mmlu_cloze:human_aging|0": 0,
"custom|mmlu_cloze:human_sexuality|0": 0,
"custom|mmlu_cloze:international_law|0": 0,
"custom|mmlu_cloze:jurisprudence|0": 0,
"custom|mmlu_cloze:logical_fallacies|0": 0,
"custom|mmlu_cloze:machine_learning|0": 0,
"custom|mmlu_cloze:management|0": 0,
"custom|mmlu_cloze:marketing|0": 0,
"custom|mmlu_cloze:medical_genetics|0": 0,
"custom|mmlu_cloze:miscellaneous|0": 0,
"custom|mmlu_cloze:moral_disputes|0": 0,
"custom|mmlu_cloze:moral_scenarios|0": 0,
"custom|mmlu_cloze:nutrition|0": 0,
"custom|mmlu_cloze:philosophy|0": 0,
"custom|mmlu_cloze:prehistory|0": 0,
"custom|mmlu_cloze:professional_accounting|0": 0,
"custom|mmlu_cloze:professional_law|0": 0,
"custom|mmlu_cloze:professional_medicine|0": 0,
"custom|mmlu_cloze:professional_psychology|0": 0,
"custom|mmlu_cloze:public_relations|0": 0,
"custom|mmlu_cloze:security_studies|0": 0,
"custom|mmlu_cloze:sociology|0": 0,
"custom|mmlu_cloze:us_foreign_policy|0": 0,
"custom|mmlu_cloze:virology|0": 0,
"custom|mmlu_cloze:world_religions|0": 0,
"custom|mmlu_mc:abstract_algebra|0": 0,
"custom|mmlu_mc:anatomy|0": 0,
"custom|mmlu_mc:astronomy|0": 0,
"custom|mmlu_mc:business_ethics|0": 0,
"custom|mmlu_mc:clinical_knowledge|0": 0,
"custom|mmlu_mc:college_biology|0": 0,
"custom|mmlu_mc:college_chemistry|0": 0,
"custom|mmlu_mc:college_computer_science|0": 0,
"custom|mmlu_mc:college_mathematics|0": 0,
"custom|mmlu_mc:college_medicine|0": 0,
"custom|mmlu_mc:college_physics|0": 0,
"custom|mmlu_mc:computer_security|0": 0,
"custom|mmlu_mc:conceptual_physics|0": 0,
"custom|mmlu_mc:econometrics|0": 0,
"custom|mmlu_mc:electrical_engineering|0": 0,
"custom|mmlu_mc:elementary_mathematics|0": 0,
"custom|mmlu_mc:formal_logic|0": 0,
"custom|mmlu_mc:global_facts|0": 0,
"custom|mmlu_mc:high_school_biology|0": 0,
"custom|mmlu_mc:high_school_chemistry|0": 0,
"custom|mmlu_mc:high_school_computer_science|0": 0,
"custom|mmlu_mc:high_school_european_history|0": 0,
"custom|mmlu_mc:high_school_geography|0": 0,
"custom|mmlu_mc:high_school_government_and_politics|0": 0,
"custom|mmlu_mc:high_school_macroeconomics|0": 0,
"custom|mmlu_mc:high_school_mathematics|0": 0,
"custom|mmlu_mc:high_school_microeconomics|0": 0,
"custom|mmlu_mc:high_school_physics|0": 0,
"custom|mmlu_mc:high_school_psychology|0": 0,
"custom|mmlu_mc:high_school_statistics|0": 0,
"custom|mmlu_mc:high_school_us_history|0": 0,
"custom|mmlu_mc:high_school_world_history|0": 0,
"custom|mmlu_mc:human_aging|0": 0,
"custom|mmlu_mc:human_sexuality|0": 0,
"custom|mmlu_mc:international_law|0": 0,
"custom|mmlu_mc:jurisprudence|0": 0,
"custom|mmlu_mc:logical_fallacies|0": 0,
"custom|mmlu_mc:machine_learning|0": 0,
"custom|mmlu_mc:management|0": 0,
"custom|mmlu_mc:marketing|0": 0,
"custom|mmlu_mc:medical_genetics|0": 0,
"custom|mmlu_mc:miscellaneous|0": 0,
"custom|mmlu_mc:moral_disputes|0": 0,
"custom|mmlu_mc:moral_scenarios|0": 0,
"custom|mmlu_mc:nutrition|0": 0,
"custom|mmlu_mc:philosophy|0": 0,
"custom|mmlu_mc:prehistory|0": 0,
"custom|mmlu_mc:professional_accounting|0": 0,
"custom|mmlu_mc:professional_law|0": 0,
"custom|mmlu_mc:professional_medicine|0": 0,
"custom|mmlu_mc:professional_psychology|0": 0,
"custom|mmlu_mc:public_relations|0": 0,
"custom|mmlu_mc:security_studies|0": 0,
"custom|mmlu_mc:sociology|0": 0,
"custom|mmlu_mc:us_foreign_policy|0": 0,
"custom|mmlu_mc:virology|0": 0,
"custom|mmlu_mc:world_religions|0": 0,
"custom|mmlu_pro_cloze|0": 0,
"custom|openbookqa|0": 0,
"custom|piqa|0": 0,
"custom|siqa|0": 0,
"custom|trivia_qa|0": 0,
"custom|winogrande|0": 0
},
"config_tasks": {
"custom|arc:challenge": {
"name": "arc:challenge",
"prompt_function": "arc",
"hf_repo": "ai2_arc",
"hf_subset": "ARC-Challenge",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1172,
"effective_num_docs": 1172,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|arc:easy": {
"name": "arc:easy",
"prompt_function": "arc",
"hf_repo": "ai2_arc",
"hf_subset": "ARC-Easy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 2376,
"effective_num_docs": 2376,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|commonsense_qa": {
"name": "commonsense_qa",
"prompt_function": "commonsense_qa_prompt",
"hf_repo": "commonsense_qa",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1221,
"effective_num_docs": 1221,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|gsm8k": {
"name": "gsm8k",
"prompt_function": "gsm8k",
"hf_repo": "gsm8k",
"hf_subset": "main",
"metric": [
"quasi_exact_match_gsm8k"
],
"hf_avail_splits": [
"train",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling_from_train",
"generation_size": 256,
"stop_sequence": [
"Question:",
"Question"
],
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1319,
"effective_num_docs": 1319,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|hellaswag": {
"name": "hellaswag",
"prompt_function": "hellaswag_prompt",
"hf_repo": "hellaswag",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 10042,
"effective_num_docs": 10042,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:abstract_algebra": {
"name": "mmlu_cloze:abstract_algebra",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "abstract_algebra",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:anatomy": {
"name": "mmlu_cloze:anatomy",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "anatomy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:astronomy": {
"name": "mmlu_cloze:astronomy",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "astronomy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:business_ethics": {
"name": "mmlu_cloze:business_ethics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "business_ethics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:clinical_knowledge": {
"name": "mmlu_cloze:clinical_knowledge",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "clinical_knowledge",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:college_biology": {
"name": "mmlu_cloze:college_biology",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_biology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:college_chemistry": {
"name": "mmlu_cloze:college_chemistry",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_chemistry",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:college_computer_science": {
"name": "mmlu_cloze:college_computer_science",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_computer_science",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:college_mathematics": {
"name": "mmlu_cloze:college_mathematics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_mathematics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:college_medicine": {
"name": "mmlu_cloze:college_medicine",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_medicine",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:college_physics": {
"name": "mmlu_cloze:college_physics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_physics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:computer_security": {
"name": "mmlu_cloze:computer_security",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "computer_security",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:conceptual_physics": {
"name": "mmlu_cloze:conceptual_physics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "conceptual_physics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:econometrics": {
"name": "mmlu_cloze:econometrics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "econometrics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:electrical_engineering": {
"name": "mmlu_cloze:electrical_engineering",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "electrical_engineering",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:elementary_mathematics": {
"name": "mmlu_cloze:elementary_mathematics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "elementary_mathematics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:formal_logic": {
"name": "mmlu_cloze:formal_logic",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "formal_logic",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:global_facts": {
"name": "mmlu_cloze:global_facts",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "global_facts",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_biology": {
"name": "mmlu_cloze:high_school_biology",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_biology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 310,
"effective_num_docs": 310,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_chemistry": {
"name": "mmlu_cloze:high_school_chemistry",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_chemistry",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_computer_science": {
"name": "mmlu_cloze:high_school_computer_science",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_computer_science",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_european_history": {
"name": "mmlu_cloze:high_school_european_history",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_european_history",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 165,
"effective_num_docs": 165,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_geography": {
"name": "mmlu_cloze:high_school_geography",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_geography",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_government_and_politics": {
"name": "mmlu_cloze:high_school_government_and_politics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_government_and_politics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_macroeconomics": {
"name": "mmlu_cloze:high_school_macroeconomics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_macroeconomics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_mathematics": {
"name": "mmlu_cloze:high_school_mathematics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_mathematics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_microeconomics": {
"name": "mmlu_cloze:high_school_microeconomics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_microeconomics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_physics": {
"name": "mmlu_cloze:high_school_physics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_physics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_psychology": {
"name": "mmlu_cloze:high_school_psychology",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_psychology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_statistics": {
"name": "mmlu_cloze:high_school_statistics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_statistics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 216,
"effective_num_docs": 216,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_us_history": {
"name": "mmlu_cloze:high_school_us_history",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_us_history",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:high_school_world_history": {
"name": "mmlu_cloze:high_school_world_history",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_world_history",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:human_aging": {
"name": "mmlu_cloze:human_aging",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "human_aging",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:human_sexuality": {
"name": "mmlu_cloze:human_sexuality",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "human_sexuality",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:international_law": {
"name": "mmlu_cloze:international_law",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "international_law",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:jurisprudence": {
"name": "mmlu_cloze:jurisprudence",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "jurisprudence",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:logical_fallacies": {
"name": "mmlu_cloze:logical_fallacies",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "logical_fallacies",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:machine_learning": {
"name": "mmlu_cloze:machine_learning",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "machine_learning",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:management": {
"name": "mmlu_cloze:management",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "management",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:marketing": {
"name": "mmlu_cloze:marketing",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "marketing",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:medical_genetics": {
"name": "mmlu_cloze:medical_genetics",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "medical_genetics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:miscellaneous": {
"name": "mmlu_cloze:miscellaneous",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "miscellaneous",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 783,
"effective_num_docs": 783,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:moral_disputes": {
"name": "mmlu_cloze:moral_disputes",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "moral_disputes",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:moral_scenarios": {
"name": "mmlu_cloze:moral_scenarios",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "moral_scenarios",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:nutrition": {
"name": "mmlu_cloze:nutrition",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "nutrition",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:philosophy": {
"name": "mmlu_cloze:philosophy",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "philosophy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:prehistory": {
"name": "mmlu_cloze:prehistory",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "prehistory",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:professional_accounting": {
"name": "mmlu_cloze:professional_accounting",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_accounting",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 282,
"effective_num_docs": 282,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:professional_law": {
"name": "mmlu_cloze:professional_law",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_law",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1534,
"effective_num_docs": 1534,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:professional_medicine": {
"name": "mmlu_cloze:professional_medicine",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_medicine",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:professional_psychology": {
"name": "mmlu_cloze:professional_psychology",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_psychology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:public_relations": {
"name": "mmlu_cloze:public_relations",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "public_relations",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:security_studies": {
"name": "mmlu_cloze:security_studies",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "security_studies",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:sociology": {
"name": "mmlu_cloze:sociology",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "sociology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:us_foreign_policy": {
"name": "mmlu_cloze:us_foreign_policy",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "us_foreign_policy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:virology": {
"name": "mmlu_cloze:virology",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "virology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_cloze:world_religions": {
"name": "mmlu_cloze:world_religions",
"prompt_function": "mmlu_cloze_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "world_religions",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:abstract_algebra": {
"name": "mmlu_mc:abstract_algebra",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "abstract_algebra",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:anatomy": {
"name": "mmlu_mc:anatomy",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "anatomy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:astronomy": {
"name": "mmlu_mc:astronomy",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "astronomy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:business_ethics": {
"name": "mmlu_mc:business_ethics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "business_ethics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:clinical_knowledge": {
"name": "mmlu_mc:clinical_knowledge",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "clinical_knowledge",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:college_biology": {
"name": "mmlu_mc:college_biology",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_biology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:college_chemistry": {
"name": "mmlu_mc:college_chemistry",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_chemistry",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:college_computer_science": {
"name": "mmlu_mc:college_computer_science",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_computer_science",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:college_mathematics": {
"name": "mmlu_mc:college_mathematics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_mathematics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:college_medicine": {
"name": "mmlu_mc:college_medicine",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_medicine",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:college_physics": {
"name": "mmlu_mc:college_physics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_physics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:computer_security": {
"name": "mmlu_mc:computer_security",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "computer_security",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:conceptual_physics": {
"name": "mmlu_mc:conceptual_physics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "conceptual_physics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:econometrics": {
"name": "mmlu_mc:econometrics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "econometrics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:electrical_engineering": {
"name": "mmlu_mc:electrical_engineering",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "electrical_engineering",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:elementary_mathematics": {
"name": "mmlu_mc:elementary_mathematics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "elementary_mathematics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:formal_logic": {
"name": "mmlu_mc:formal_logic",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "formal_logic",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:global_facts": {
"name": "mmlu_mc:global_facts",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "global_facts",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_biology": {
"name": "mmlu_mc:high_school_biology",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_biology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 310,
"effective_num_docs": 310,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_chemistry": {
"name": "mmlu_mc:high_school_chemistry",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_chemistry",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_computer_science": {
"name": "mmlu_mc:high_school_computer_science",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_computer_science",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_european_history": {
"name": "mmlu_mc:high_school_european_history",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_european_history",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 165,
"effective_num_docs": 165,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_geography": {
"name": "mmlu_mc:high_school_geography",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_geography",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_government_and_politics": {
"name": "mmlu_mc:high_school_government_and_politics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_government_and_politics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_macroeconomics": {
"name": "mmlu_mc:high_school_macroeconomics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_macroeconomics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_mathematics": {
"name": "mmlu_mc:high_school_mathematics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_mathematics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_microeconomics": {
"name": "mmlu_mc:high_school_microeconomics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_microeconomics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_physics": {
"name": "mmlu_mc:high_school_physics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_physics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_psychology": {
"name": "mmlu_mc:high_school_psychology",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_psychology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_statistics": {
"name": "mmlu_mc:high_school_statistics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_statistics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 216,
"effective_num_docs": 216,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_us_history": {
"name": "mmlu_mc:high_school_us_history",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_us_history",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:high_school_world_history": {
"name": "mmlu_mc:high_school_world_history",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_world_history",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:human_aging": {
"name": "mmlu_mc:human_aging",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "human_aging",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:human_sexuality": {
"name": "mmlu_mc:human_sexuality",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "human_sexuality",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:international_law": {
"name": "mmlu_mc:international_law",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "international_law",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:jurisprudence": {
"name": "mmlu_mc:jurisprudence",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "jurisprudence",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:logical_fallacies": {
"name": "mmlu_mc:logical_fallacies",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "logical_fallacies",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:machine_learning": {
"name": "mmlu_mc:machine_learning",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "machine_learning",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:management": {
"name": "mmlu_mc:management",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "management",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:marketing": {
"name": "mmlu_mc:marketing",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "marketing",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:medical_genetics": {
"name": "mmlu_mc:medical_genetics",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "medical_genetics",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:miscellaneous": {
"name": "mmlu_mc:miscellaneous",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "miscellaneous",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 783,
"effective_num_docs": 783,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:moral_disputes": {
"name": "mmlu_mc:moral_disputes",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "moral_disputes",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:moral_scenarios": {
"name": "mmlu_mc:moral_scenarios",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "moral_scenarios",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:nutrition": {
"name": "mmlu_mc:nutrition",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "nutrition",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:philosophy": {
"name": "mmlu_mc:philosophy",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "philosophy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:prehistory": {
"name": "mmlu_mc:prehistory",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "prehistory",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:professional_accounting": {
"name": "mmlu_mc:professional_accounting",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_accounting",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 282,
"effective_num_docs": 282,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:professional_law": {
"name": "mmlu_mc:professional_law",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_law",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1534,
"effective_num_docs": 1534,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:professional_medicine": {
"name": "mmlu_mc:professional_medicine",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_medicine",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:professional_psychology": {
"name": "mmlu_mc:professional_psychology",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_psychology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:public_relations": {
"name": "mmlu_mc:public_relations",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "public_relations",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:security_studies": {
"name": "mmlu_mc:security_studies",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "security_studies",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:sociology": {
"name": "mmlu_mc:sociology",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "sociology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:us_foreign_policy": {
"name": "mmlu_mc:us_foreign_policy",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "us_foreign_policy",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:virology": {
"name": "mmlu_mc:virology",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "virology",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_mc:world_religions": {
"name": "mmlu_mc:world_religions",
"prompt_function": "mmlu_mc_prompt",
"hf_repo": "lighteval/mmlu",
"hf_subset": "world_religions",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": null,
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|mmlu_pro_cloze": {
"name": "mmlu_pro_cloze",
"prompt_function": "mmlu_pro_cloze_prompt",
"hf_repo": "TIGER-Lab/MMLU-Pro",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": null,
"generation_size": -1,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 12032,
"effective_num_docs": 12032,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|openbookqa": {
"name": "openbookqa",
"prompt_function": "openbookqa",
"hf_repo": "openbookqa",
"hf_subset": "main",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 500,
"effective_num_docs": 500,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|piqa": {
"name": "piqa",
"prompt_function": "piqa_harness",
"hf_repo": "piqa",
"hf_subset": "plain_text",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1838,
"effective_num_docs": 1838,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|siqa": {
"name": "siqa",
"prompt_function": "siqa_prompt",
"hf_repo": "lighteval/siqa",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1954,
"effective_num_docs": 1954,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|trivia_qa": {
"name": "trivia_qa",
"prompt_function": "triviaqa",
"hf_repo": "mandarjoshi/trivia_qa",
"hf_subset": "rc.nocontext",
"metric": [
"quasi_exact_match_triviaqa"
],
"hf_avail_splits": [
"train",
"validation"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": "random_sampling_from_train",
"generation_size": 20,
"stop_sequence": [
"\n",
".",
","
],
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 17944,
"effective_num_docs": 17944,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
},
"custom|winogrande": {
"name": "winogrande",
"prompt_function": "winogrande",
"hf_repo": "winogrande",
"hf_subset": "winogrande_xl",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"train",
"validation",
"test"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": null,
"stop_sequence": null,
"output_regex": null,
"num_samples": null,
"frozen": false,
"suite": [
"custom"
],
"original_num_docs": 1267,
"effective_num_docs": 1267,
"trust_dataset": true,
"must_remove_duplicate_docs": null,
"version": 0
}
},
"summary_tasks": {
"custom|arc:challenge|0": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "17b0cae357c0259e",
"hash_input_tokens": "1a80ba451389147f",
"hash_cont_tokens": "125c49608499af5c"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4687,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|arc:easy|0": {
"hashes": {
"hash_examples": "63703c3cdff55bec",
"hash_full_prompts": "63703c3cdff55bec",
"hash_input_tokens": "d9b56de7e99a5c9c",
"hash_cont_tokens": "966c4235d9e2f244"
},
"truncated": 0,
"non_truncated": 2376,
"padded": 9501,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|commonsense_qa|0": {
"hashes": {
"hash_examples": "2e514c541df5ae5b",
"hash_full_prompts": "2e514c541df5ae5b",
"hash_input_tokens": "3ab0acf7245fa7dd",
"hash_cont_tokens": "c9ae8301e6aeb0f9"
},
"truncated": 0,
"non_truncated": 1221,
"padded": 6105,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|hellaswag|0": {
"hashes": {
"hash_examples": "31985c805c3a737e",
"hash_full_prompts": "31985c805c3a737e",
"hash_input_tokens": "cb1b4fddc24d00e3",
"hash_cont_tokens": "30c3f5fcc8e12310"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 39721,
"non_padded": 447,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:abstract_algebra|0": {
"hashes": {
"hash_examples": "ff00c12a680621ba",
"hash_full_prompts": "ff00c12a680621ba",
"hash_input_tokens": "7bc65f0dcf4e8b66",
"hash_cont_tokens": "3722ba329e325bae"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:anatomy|0": {
"hashes": {
"hash_examples": "7f9c9593991d6727",
"hash_full_prompts": "7f9c9593991d6727",
"hash_input_tokens": "8d4f684fbf885541",
"hash_cont_tokens": "10cd8a3e806a8b3a"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:astronomy|0": {
"hashes": {
"hash_examples": "ff5985a306787836",
"hash_full_prompts": "ff5985a306787836",
"hash_input_tokens": "21bc0aee62164a2f",
"hash_cont_tokens": "3487ec01bb06dc28"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:business_ethics|0": {
"hashes": {
"hash_examples": "e3fe02a23d08c2d0",
"hash_full_prompts": "e3fe02a23d08c2d0",
"hash_input_tokens": "dc911e2821af6c2e",
"hash_cont_tokens": "e7e730f1afaf5702"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:clinical_knowledge|0": {
"hashes": {
"hash_examples": "9b962be8e1615cd5",
"hash_full_prompts": "9b962be8e1615cd5",
"hash_input_tokens": "17c60ca3f6378b46",
"hash_cont_tokens": "355af8107d9fa230"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:college_biology|0": {
"hashes": {
"hash_examples": "17b4ca841de3a2a3",
"hash_full_prompts": "17b4ca841de3a2a3",
"hash_input_tokens": "89cef67c04d6f8b8",
"hash_cont_tokens": "ee5ca8d6f749de60"
},
"truncated": 0,
"non_truncated": 144,
"padded": 575,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:college_chemistry|0": {
"hashes": {
"hash_examples": "babea49005fd8249",
"hash_full_prompts": "babea49005fd8249",
"hash_input_tokens": "2d0a4177310fdb6a",
"hash_cont_tokens": "85143a0ca77c0961"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:college_computer_science|0": {
"hashes": {
"hash_examples": "47f4fef1846c8914",
"hash_full_prompts": "47f4fef1846c8914",
"hash_input_tokens": "1d5c7fe78fea8ab5",
"hash_cont_tokens": "8c8f12fb45715ee0"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:college_mathematics|0": {
"hashes": {
"hash_examples": "4d3686d599963414",
"hash_full_prompts": "4d3686d599963414",
"hash_input_tokens": "c14f082c8202b9a8",
"hash_cont_tokens": "d34f1764a6a4b2dd"
},
"truncated": 0,
"non_truncated": 100,
"padded": 398,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:college_medicine|0": {
"hashes": {
"hash_examples": "7209619ecac6f235",
"hash_full_prompts": "7209619ecac6f235",
"hash_input_tokens": "3649c3d32ac146db",
"hash_cont_tokens": "abd1043e1455917b"
},
"truncated": 0,
"non_truncated": 173,
"padded": 689,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:college_physics|0": {
"hashes": {
"hash_examples": "6131b6c60dd7f055",
"hash_full_prompts": "6131b6c60dd7f055",
"hash_input_tokens": "512a5f6aff1f12c5",
"hash_cont_tokens": "d0c750fb2e9f4582"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:computer_security|0": {
"hashes": {
"hash_examples": "99539c9a5bc98a59",
"hash_full_prompts": "99539c9a5bc98a59",
"hash_input_tokens": "56a9385e60e26c39",
"hash_cont_tokens": "2fd9482cc71690a2"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:conceptual_physics|0": {
"hashes": {
"hash_examples": "4e15015839d00858",
"hash_full_prompts": "4e15015839d00858",
"hash_input_tokens": "d3d0394a32fbc2c2",
"hash_cont_tokens": "e863d73d23651a55"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:econometrics|0": {
"hashes": {
"hash_examples": "fba5c647465e89e0",
"hash_full_prompts": "fba5c647465e89e0",
"hash_input_tokens": "1204070a3bb022cd",
"hash_cont_tokens": "222e26b3d18a3157"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:electrical_engineering|0": {
"hashes": {
"hash_examples": "2db2ba0cb98cda51",
"hash_full_prompts": "2db2ba0cb98cda51",
"hash_input_tokens": "4b89f57ffb145417",
"hash_cont_tokens": "5a473530ecc1a291"
},
"truncated": 0,
"non_truncated": 145,
"padded": 580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:elementary_mathematics|0": {
"hashes": {
"hash_examples": "f231cd5ae05742bb",
"hash_full_prompts": "f231cd5ae05742bb",
"hash_input_tokens": "0f4d9df5b33ee979",
"hash_cont_tokens": "33a1bd8906c60cb2"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:formal_logic|0": {
"hashes": {
"hash_examples": "b69d9607d5da536e",
"hash_full_prompts": "b69d9607d5da536e",
"hash_input_tokens": "27e49106a6279f75",
"hash_cont_tokens": "5c87eaf78a589994"
},
"truncated": 0,
"non_truncated": 126,
"padded": 498,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:global_facts|0": {
"hashes": {
"hash_examples": "c9f53772e672f6bc",
"hash_full_prompts": "c9f53772e672f6bc",
"hash_input_tokens": "5a45da5dbbbb93a9",
"hash_cont_tokens": "7cc2f5666bd2595c"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_biology|0": {
"hashes": {
"hash_examples": "455027cf6cdd02bc",
"hash_full_prompts": "455027cf6cdd02bc",
"hash_input_tokens": "0e24c436c50ee6e6",
"hash_cont_tokens": "0914e9d87572c1d6"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1234,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_chemistry|0": {
"hashes": {
"hash_examples": "95d9caac9edbc34d",
"hash_full_prompts": "95d9caac9edbc34d",
"hash_input_tokens": "a65e6b13d286866d",
"hash_cont_tokens": "51132184cc53b900"
},
"truncated": 0,
"non_truncated": 203,
"padded": 805,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_computer_science|0": {
"hashes": {
"hash_examples": "6e44706db3791e51",
"hash_full_prompts": "6e44706db3791e51",
"hash_input_tokens": "b574513e7b6ea3e8",
"hash_cont_tokens": "982e06f31b257b82"
},
"truncated": 0,
"non_truncated": 100,
"padded": 394,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_european_history|0": {
"hashes": {
"hash_examples": "9078fce41897117d",
"hash_full_prompts": "9078fce41897117d",
"hash_input_tokens": "0150f31461ec1aff",
"hash_cont_tokens": "ca0e022c7c90bf56"
},
"truncated": 0,
"non_truncated": 165,
"padded": 660,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_geography|0": {
"hashes": {
"hash_examples": "c68adcc34130a2e8",
"hash_full_prompts": "c68adcc34130a2e8",
"hash_input_tokens": "c5e53e62e82ebb10",
"hash_cont_tokens": "ac8305f6d617f1a7"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "6f839b19e49a0858",
"hash_full_prompts": "6f839b19e49a0858",
"hash_input_tokens": "3ebd70bc972c8010",
"hash_cont_tokens": "b4882210283c8e78"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "708a5c05e7adb220",
"hash_full_prompts": "708a5c05e7adb220",
"hash_input_tokens": "741536c55adc903c",
"hash_cont_tokens": "17b436962516a829"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1557,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_mathematics|0": {
"hashes": {
"hash_examples": "47c621dd61fd7790",
"hash_full_prompts": "47c621dd61fd7790",
"hash_input_tokens": "b0acd1dc9c8c3bd2",
"hash_cont_tokens": "c21561924e89f4e6"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1073,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "3c467180d90f6371",
"hash_full_prompts": "3c467180d90f6371",
"hash_input_tokens": "853fae992ef7b3e8",
"hash_cont_tokens": "a5152854b16dddd6"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_physics|0": {
"hashes": {
"hash_examples": "89a598cdde43be79",
"hash_full_prompts": "89a598cdde43be79",
"hash_input_tokens": "dd352b7719edb42a",
"hash_cont_tokens": "4453c4648b0d3fb1"
},
"truncated": 0,
"non_truncated": 151,
"padded": 602,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_psychology|0": {
"hashes": {
"hash_examples": "d8a7bf3f17ec12d0",
"hash_full_prompts": "d8a7bf3f17ec12d0",
"hash_input_tokens": "807b35bff6e0d108",
"hash_cont_tokens": "ae3f9889156cbf0e"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2172,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_statistics|0": {
"hashes": {
"hash_examples": "995374a6caaa97d6",
"hash_full_prompts": "995374a6caaa97d6",
"hash_input_tokens": "c3a380d2caaea673",
"hash_cont_tokens": "b95ad2aa2091fee6"
},
"truncated": 0,
"non_truncated": 216,
"padded": 860,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_us_history|0": {
"hashes": {
"hash_examples": "7893e9d07e34cb37",
"hash_full_prompts": "7893e9d07e34cb37",
"hash_input_tokens": "7fb9bbd0c5d6e638",
"hash_cont_tokens": "117f507507b38ff1"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:high_school_world_history|0": {
"hashes": {
"hash_examples": "48879684e37d1716",
"hash_full_prompts": "48879684e37d1716",
"hash_input_tokens": "4e508a697a83438f",
"hash_cont_tokens": "feb8fd9bf63ce918"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:human_aging|0": {
"hashes": {
"hash_examples": "afae8c53bd6e5f44",
"hash_full_prompts": "afae8c53bd6e5f44",
"hash_input_tokens": "94d8dc34e705a4bc",
"hash_cont_tokens": "65e40764ff693b00"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:human_sexuality|0": {
"hashes": {
"hash_examples": "9701f02004912a7a",
"hash_full_prompts": "9701f02004912a7a",
"hash_input_tokens": "d3d6425f4edfb085",
"hash_cont_tokens": "109a6cc3d24afa3e"
},
"truncated": 0,
"non_truncated": 131,
"padded": 523,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:international_law|0": {
"hashes": {
"hash_examples": "47955196de2d2c7a",
"hash_full_prompts": "47955196de2d2c7a",
"hash_input_tokens": "bdd15dc02beb3279",
"hash_cont_tokens": "f8883cac3f2ba152"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:jurisprudence|0": {
"hashes": {
"hash_examples": "a992eac2b8ae8bc4",
"hash_full_prompts": "a992eac2b8ae8bc4",
"hash_input_tokens": "b9642a7eac40a64f",
"hash_cont_tokens": "0a554afcae081ada"
},
"truncated": 0,
"non_truncated": 108,
"padded": 431,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:logical_fallacies|0": {
"hashes": {
"hash_examples": "b0d31ed08f699e6c",
"hash_full_prompts": "b0d31ed08f699e6c",
"hash_input_tokens": "dcfeba59590d2733",
"hash_cont_tokens": "6b84656fb38c2583"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:machine_learning|0": {
"hashes": {
"hash_examples": "dccdef2bae4461a6",
"hash_full_prompts": "dccdef2bae4461a6",
"hash_input_tokens": "a06e77c37d98d508",
"hash_cont_tokens": "f40b185fb9843e87"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:management|0": {
"hashes": {
"hash_examples": "f600be25303e1fe2",
"hash_full_prompts": "f600be25303e1fe2",
"hash_input_tokens": "f36b9f20d8b04d9a",
"hash_cont_tokens": "620a0bcabf306e8d"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:marketing|0": {
"hashes": {
"hash_examples": "1a0df3ae5e306669",
"hash_full_prompts": "1a0df3ae5e306669",
"hash_input_tokens": "1e3bbdc729f756af",
"hash_cont_tokens": "c013289c86a92fdb"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:medical_genetics|0": {
"hashes": {
"hash_examples": "eb87c9cfd9b7c760",
"hash_full_prompts": "eb87c9cfd9b7c760",
"hash_input_tokens": "f91941ebf0a7d639",
"hash_cont_tokens": "78bc82424ef7b7d9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:miscellaneous|0": {
"hashes": {
"hash_examples": "f88d724036ba03b7",
"hash_full_prompts": "f88d724036ba03b7",
"hash_input_tokens": "9e596d3b99c33d17",
"hash_cont_tokens": "8b82b7ff69ce32b0"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3131,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:moral_disputes|0": {
"hashes": {
"hash_examples": "79782c0823005e7b",
"hash_full_prompts": "79782c0823005e7b",
"hash_input_tokens": "c8a4c24b156743ed",
"hash_cont_tokens": "9ed381140e06ea2d"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:moral_scenarios|0": {
"hashes": {
"hash_examples": "fe57fdd86442b483",
"hash_full_prompts": "fe57fdd86442b483",
"hash_input_tokens": "a62c968277cfc272",
"hash_cont_tokens": "78d36b9c0004374b"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3529,
"non_padded": 51,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:nutrition|0": {
"hashes": {
"hash_examples": "421f206f5957e90f",
"hash_full_prompts": "421f206f5957e90f",
"hash_input_tokens": "65e465f378fe3751",
"hash_cont_tokens": "c8de353c5d0a45bf"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:philosophy|0": {
"hashes": {
"hash_examples": "c93073e383957fc4",
"hash_full_prompts": "c93073e383957fc4",
"hash_input_tokens": "a89121a0eb5adb24",
"hash_cont_tokens": "f047085684a04f83"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:prehistory|0": {
"hashes": {
"hash_examples": "58ec03e20eae9f90",
"hash_full_prompts": "58ec03e20eae9f90",
"hash_input_tokens": "ae1de55871f9c6aa",
"hash_cont_tokens": "d59b7264b52f62ff"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:professional_accounting|0": {
"hashes": {
"hash_examples": "13f7a6023a118512",
"hash_full_prompts": "13f7a6023a118512",
"hash_input_tokens": "46430b373dae77ef",
"hash_cont_tokens": "d9f26b0e34bf44e3"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1120,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:professional_law|0": {
"hashes": {
"hash_examples": "8086d24f4d4e82f4",
"hash_full_prompts": "8086d24f4d4e82f4",
"hash_input_tokens": "ac86825d4f7d6a2a",
"hash_cont_tokens": "cb5e4423deb3bc06"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6125,
"non_padded": 11,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:professional_medicine|0": {
"hashes": {
"hash_examples": "f0b30b4e786eaeea",
"hash_full_prompts": "f0b30b4e786eaeea",
"hash_input_tokens": "4a7dc1fa8f5d4fc5",
"hash_cont_tokens": "2e9c6460db7cf240"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1074,
"non_padded": 14,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:professional_psychology|0": {
"hashes": {
"hash_examples": "29fd2b4a194c28ea",
"hash_full_prompts": "29fd2b4a194c28ea",
"hash_input_tokens": "87b8c8b1f1e1770e",
"hash_cont_tokens": "57f4ac137227a6a8"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2444,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:public_relations|0": {
"hashes": {
"hash_examples": "52a84bb75dd812eb",
"hash_full_prompts": "52a84bb75dd812eb",
"hash_input_tokens": "886d1bf73db9b3ad",
"hash_cont_tokens": "d45581a5d3f14b94"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:security_studies|0": {
"hashes": {
"hash_examples": "2469ceb06f350432",
"hash_full_prompts": "2469ceb06f350432",
"hash_input_tokens": "c96b3893187077e2",
"hash_cont_tokens": "2f980d82506cfccc"
},
"truncated": 0,
"non_truncated": 245,
"padded": 976,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:sociology|0": {
"hashes": {
"hash_examples": "d2060dedb3fc2bea",
"hash_full_prompts": "d2060dedb3fc2bea",
"hash_input_tokens": "0ad9c72646c9bdc0",
"hash_cont_tokens": "b89d02c97203ff2c"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:us_foreign_policy|0": {
"hashes": {
"hash_examples": "84e882e740d43f01",
"hash_full_prompts": "84e882e740d43f01",
"hash_input_tokens": "59382eb3d92d7114",
"hash_cont_tokens": "e0cb5d016b2da923"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:virology|0": {
"hashes": {
"hash_examples": "0428d2d277aa56aa",
"hash_full_prompts": "0428d2d277aa56aa",
"hash_input_tokens": "2003ed1edec4f410",
"hash_cont_tokens": "a9858490e8a50dcd"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_cloze:world_religions|0": {
"hashes": {
"hash_examples": "7e8f045c67ba6ba1",
"hash_full_prompts": "7e8f045c67ba6ba1",
"hash_input_tokens": "93eb1cc09970d585",
"hash_cont_tokens": "6d6de61b03a0f67a"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:abstract_algebra|0": {
"hashes": {
"hash_examples": "4c76229e00c9c0e9",
"hash_full_prompts": "4c76229e00c9c0e9",
"hash_input_tokens": "1ca9d0b9f1b13c3a",
"hash_cont_tokens": "c8b3e6990f277952"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:anatomy|0": {
"hashes": {
"hash_examples": "6a1f8104dccbd33b",
"hash_full_prompts": "6a1f8104dccbd33b",
"hash_input_tokens": "506a2eb81f0ebd41",
"hash_cont_tokens": "731840296eb8b864"
},
"truncated": 0,
"non_truncated": 135,
"padded": 536,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:astronomy|0": {
"hashes": {
"hash_examples": "1302effa3a76ce4c",
"hash_full_prompts": "1302effa3a76ce4c",
"hash_input_tokens": "240a675542607aaf",
"hash_cont_tokens": "59c4eb6bfd38b563"
},
"truncated": 0,
"non_truncated": 152,
"padded": 600,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:business_ethics|0": {
"hashes": {
"hash_examples": "03cb8bce5336419a",
"hash_full_prompts": "03cb8bce5336419a",
"hash_input_tokens": "f8ffe3485997e69a",
"hash_cont_tokens": "46c3e9b03592b386"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:clinical_knowledge|0": {
"hashes": {
"hash_examples": "ffbb9c7b2be257f9",
"hash_full_prompts": "ffbb9c7b2be257f9",
"hash_input_tokens": "a7b0b622d4a5d914",
"hash_cont_tokens": "259f4c696060c1c2"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1052,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:college_biology|0": {
"hashes": {
"hash_examples": "3ee77f176f38eb8e",
"hash_full_prompts": "3ee77f176f38eb8e",
"hash_input_tokens": "38e90f043b20153c",
"hash_cont_tokens": "4f0931b58aa2150b"
},
"truncated": 0,
"non_truncated": 144,
"padded": 572,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:college_chemistry|0": {
"hashes": {
"hash_examples": "ce61a69c46d47aeb",
"hash_full_prompts": "ce61a69c46d47aeb",
"hash_input_tokens": "2b1a539da4838613",
"hash_cont_tokens": "e9c575e8f2402a43"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:college_computer_science|0": {
"hashes": {
"hash_examples": "32805b52d7d5daab",
"hash_full_prompts": "32805b52d7d5daab",
"hash_input_tokens": "586a10c92c3d4aea",
"hash_cont_tokens": "fb65ba94d74b2c93"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:college_mathematics|0": {
"hashes": {
"hash_examples": "55da1a0a0bd33722",
"hash_full_prompts": "55da1a0a0bd33722",
"hash_input_tokens": "f8efb2761ac7d0f5",
"hash_cont_tokens": "554d506288cb6caa"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:college_medicine|0": {
"hashes": {
"hash_examples": "c33e143163049176",
"hash_full_prompts": "c33e143163049176",
"hash_input_tokens": "398bab21a62403d5",
"hash_cont_tokens": "29e325dea40fe4c9"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:college_physics|0": {
"hashes": {
"hash_examples": "ebdab1cdb7e555df",
"hash_full_prompts": "ebdab1cdb7e555df",
"hash_input_tokens": "aafd604e6a831a90",
"hash_cont_tokens": "9e5c29d773a36305"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:computer_security|0": {
"hashes": {
"hash_examples": "a24fd7d08a560921",
"hash_full_prompts": "a24fd7d08a560921",
"hash_input_tokens": "096a58ef29190974",
"hash_cont_tokens": "8c2b1820aabd2c3f"
},
"truncated": 0,
"non_truncated": 100,
"padded": 392,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:conceptual_physics|0": {
"hashes": {
"hash_examples": "8300977a79386993",
"hash_full_prompts": "8300977a79386993",
"hash_input_tokens": "487be9bfcc22f8ae",
"hash_cont_tokens": "cb35d879f9089d88"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:econometrics|0": {
"hashes": {
"hash_examples": "ddde36788a04a46f",
"hash_full_prompts": "ddde36788a04a46f",
"hash_input_tokens": "b96822fdac48be2a",
"hash_cont_tokens": "38202b0ccdc9ee23"
},
"truncated": 0,
"non_truncated": 114,
"padded": 448,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:electrical_engineering|0": {
"hashes": {
"hash_examples": "acbc5def98c19b3f",
"hash_full_prompts": "acbc5def98c19b3f",
"hash_input_tokens": "9684d155e984c193",
"hash_cont_tokens": "f086b291b3aa0628"
},
"truncated": 0,
"non_truncated": 145,
"padded": 576,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:elementary_mathematics|0": {
"hashes": {
"hash_examples": "146e61d07497a9bd",
"hash_full_prompts": "146e61d07497a9bd",
"hash_input_tokens": "5f137cae1894870d",
"hash_cont_tokens": "7c1880cb7ab80d0d"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1488,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:formal_logic|0": {
"hashes": {
"hash_examples": "8635216e1909a03f",
"hash_full_prompts": "8635216e1909a03f",
"hash_input_tokens": "371b1b0087b418fd",
"hash_cont_tokens": "105dfb5265994616"
},
"truncated": 0,
"non_truncated": 126,
"padded": 488,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:global_facts|0": {
"hashes": {
"hash_examples": "30b315aa6353ee47",
"hash_full_prompts": "30b315aa6353ee47",
"hash_input_tokens": "91a31641102aa67c",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_biology|0": {
"hashes": {
"hash_examples": "c9136373af2180de",
"hash_full_prompts": "c9136373af2180de",
"hash_input_tokens": "c1f34a414d19873c",
"hash_cont_tokens": "d9b3ef17f027c57d"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1216,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_chemistry|0": {
"hashes": {
"hash_examples": "b0661bfa1add6404",
"hash_full_prompts": "b0661bfa1add6404",
"hash_input_tokens": "d893f482ded311c3",
"hash_cont_tokens": "4be369e6571e06dc"
},
"truncated": 0,
"non_truncated": 203,
"padded": 808,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_computer_science|0": {
"hashes": {
"hash_examples": "80fc1d623a3d665f",
"hash_full_prompts": "80fc1d623a3d665f",
"hash_input_tokens": "aea357a0d7c513d7",
"hash_cont_tokens": "35d4dba6f0826ba4"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_european_history|0": {
"hashes": {
"hash_examples": "854da6e5af0fe1a1",
"hash_full_prompts": "854da6e5af0fe1a1",
"hash_input_tokens": "43be94f23fd292f0",
"hash_cont_tokens": "63bd63bdc490b9ff"
},
"truncated": 0,
"non_truncated": 165,
"padded": 660,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_geography|0": {
"hashes": {
"hash_examples": "7dc963c7acd19ad8",
"hash_full_prompts": "7dc963c7acd19ad8",
"hash_input_tokens": "9c4e6a2190b16f8e",
"hash_cont_tokens": "999a32d098465441"
},
"truncated": 0,
"non_truncated": 198,
"padded": 788,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "1f675dcdebc9758f",
"hash_full_prompts": "1f675dcdebc9758f",
"hash_input_tokens": "ac4efc981943c848",
"hash_cont_tokens": "361410848e01f8ed"
},
"truncated": 0,
"non_truncated": 193,
"padded": 768,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "2fb32cf2d80f0b35",
"hash_full_prompts": "2fb32cf2d80f0b35",
"hash_input_tokens": "9d540ff2a6625078",
"hash_cont_tokens": "18f9ae57b2444806"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1540,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_mathematics|0": {
"hashes": {
"hash_examples": "fd6646fdb5d58a1f",
"hash_full_prompts": "fd6646fdb5d58a1f",
"hash_input_tokens": "8c7e21028bd5f97a",
"hash_cont_tokens": "a13496e646060699"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1064,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "2118f21f71d87d84",
"hash_full_prompts": "2118f21f71d87d84",
"hash_input_tokens": "705c23da5c2e02df",
"hash_cont_tokens": "791a7a25f0571e59"
},
"truncated": 0,
"non_truncated": 238,
"padded": 948,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_physics|0": {
"hashes": {
"hash_examples": "dc3ce06378548565",
"hash_full_prompts": "dc3ce06378548565",
"hash_input_tokens": "ab5d7ca7bb8f4db5",
"hash_cont_tokens": "9677b0687811cf73"
},
"truncated": 0,
"non_truncated": 151,
"padded": 596,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_psychology|0": {
"hashes": {
"hash_examples": "c8d1d98a40e11f2f",
"hash_full_prompts": "c8d1d98a40e11f2f",
"hash_input_tokens": "5214809e6ad94546",
"hash_cont_tokens": "c6fb64e5edd7d73b"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2148,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_statistics|0": {
"hashes": {
"hash_examples": "666c8759b98ee4ff",
"hash_full_prompts": "666c8759b98ee4ff",
"hash_input_tokens": "b760a504c3a0d4f1",
"hash_cont_tokens": "42603a72bb112e10"
},
"truncated": 0,
"non_truncated": 216,
"padded": 856,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_us_history|0": {
"hashes": {
"hash_examples": "95fef1c4b7d3f81e",
"hash_full_prompts": "95fef1c4b7d3f81e",
"hash_input_tokens": "213a2496b7720abf",
"hash_cont_tokens": "6807381135d65b04"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:high_school_world_history|0": {
"hashes": {
"hash_examples": "7e5085b6184b0322",
"hash_full_prompts": "7e5085b6184b0322",
"hash_input_tokens": "eeb45b715f3e193e",
"hash_cont_tokens": "6c355e7b3498b719"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:human_aging|0": {
"hashes": {
"hash_examples": "c17333e7c7c10797",
"hash_full_prompts": "c17333e7c7c10797",
"hash_input_tokens": "17894ad262d339b4",
"hash_cont_tokens": "0f40704815d5b3f6"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:human_sexuality|0": {
"hashes": {
"hash_examples": "4edd1e9045df5e3d",
"hash_full_prompts": "4edd1e9045df5e3d",
"hash_input_tokens": "23b648b53bca213e",
"hash_cont_tokens": "a9fdf5917bdddc9b"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:international_law|0": {
"hashes": {
"hash_examples": "db2fa00d771a062a",
"hash_full_prompts": "db2fa00d771a062a",
"hash_input_tokens": "0cc62fbf821e6244",
"hash_cont_tokens": "c63e45a81fbe97b2"
},
"truncated": 0,
"non_truncated": 121,
"padded": 476,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:jurisprudence|0": {
"hashes": {
"hash_examples": "e956f86b124076fe",
"hash_full_prompts": "e956f86b124076fe",
"hash_input_tokens": "1028c218c5c3c3a7",
"hash_cont_tokens": "9df89edb95ea3c08"
},
"truncated": 0,
"non_truncated": 108,
"padded": 428,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:logical_fallacies|0": {
"hashes": {
"hash_examples": "956e0e6365ab79f1",
"hash_full_prompts": "956e0e6365ab79f1",
"hash_input_tokens": "fcc96799bc03df65",
"hash_cont_tokens": "5b4f21454680a984"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:machine_learning|0": {
"hashes": {
"hash_examples": "397997cc6f4d581e",
"hash_full_prompts": "397997cc6f4d581e",
"hash_input_tokens": "c089cf31b2d7f01c",
"hash_cont_tokens": "3137ed354284f0e0"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:management|0": {
"hashes": {
"hash_examples": "2bcbe6f6ca63d740",
"hash_full_prompts": "2bcbe6f6ca63d740",
"hash_input_tokens": "559e19ff854815df",
"hash_cont_tokens": "1279a23b3bc7b32c"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:marketing|0": {
"hashes": {
"hash_examples": "8ddb20d964a1b065",
"hash_full_prompts": "8ddb20d964a1b065",
"hash_input_tokens": "4402ef89378afbe3",
"hash_cont_tokens": "be76778b3b861344"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:medical_genetics|0": {
"hashes": {
"hash_examples": "182a71f4763d2cea",
"hash_full_prompts": "182a71f4763d2cea",
"hash_input_tokens": "237012d95029840a",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:miscellaneous|0": {
"hashes": {
"hash_examples": "4c404fdbb4ca57fc",
"hash_full_prompts": "4c404fdbb4ca57fc",
"hash_input_tokens": "e62914e414087464",
"hash_cont_tokens": "656d10e3f49675ee"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3112,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:moral_disputes|0": {
"hashes": {
"hash_examples": "60cbd2baa3fea5c9",
"hash_full_prompts": "60cbd2baa3fea5c9",
"hash_input_tokens": "d0ce9621b7026bbb",
"hash_cont_tokens": "a208a34c74088f6c"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1364,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:moral_scenarios|0": {
"hashes": {
"hash_examples": "fd8b0431fbdd75ef",
"hash_full_prompts": "fd8b0431fbdd75ef",
"hash_input_tokens": "81679b9f674ee598",
"hash_cont_tokens": "996ce7a5b6c4aef1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:nutrition|0": {
"hashes": {
"hash_examples": "71e55e2b829b6528",
"hash_full_prompts": "71e55e2b829b6528",
"hash_input_tokens": "514a8da9bd4f8f36",
"hash_cont_tokens": "9d4280b06a73f2ad"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:philosophy|0": {
"hashes": {
"hash_examples": "a6d489a8d208fa4b",
"hash_full_prompts": "a6d489a8d208fa4b",
"hash_input_tokens": "cb7959f2144dc573",
"hash_cont_tokens": "de293b0c21c8bae6"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:prehistory|0": {
"hashes": {
"hash_examples": "6cc50f032a19acaa",
"hash_full_prompts": "6cc50f032a19acaa",
"hash_input_tokens": "f9fb897f661d2859",
"hash_cont_tokens": "ed0ff6b6c4caf978"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1284,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:professional_accounting|0": {
"hashes": {
"hash_examples": "50f57ab32f5f6cea",
"hash_full_prompts": "50f57ab32f5f6cea",
"hash_input_tokens": "ea1343072ee9d1a9",
"hash_cont_tokens": "83dd1489a0640048"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1108,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:professional_law|0": {
"hashes": {
"hash_examples": "a8fdc85c64f4b215",
"hash_full_prompts": "a8fdc85c64f4b215",
"hash_input_tokens": "61048bdaf47a14e6",
"hash_cont_tokens": "29b7a4e974f8d163"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:professional_medicine|0": {
"hashes": {
"hash_examples": "c373a28a3050a73a",
"hash_full_prompts": "c373a28a3050a73a",
"hash_input_tokens": "3ae4d064e9a0790f",
"hash_cont_tokens": "ad2cf95b8929c802"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:professional_psychology|0": {
"hashes": {
"hash_examples": "bf5254fe818356af",
"hash_full_prompts": "bf5254fe818356af",
"hash_input_tokens": "026f10b77b71a6f2",
"hash_cont_tokens": "7c6318f8538bddcf"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2416,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:public_relations|0": {
"hashes": {
"hash_examples": "b66d52e28e7d14e0",
"hash_full_prompts": "b66d52e28e7d14e0",
"hash_input_tokens": "225d550c82dd770c",
"hash_cont_tokens": "1d5a43cfd6444ac8"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:security_studies|0": {
"hashes": {
"hash_examples": "514c14feaf000ad9",
"hash_full_prompts": "514c14feaf000ad9",
"hash_input_tokens": "ae0c13a4688c6a19",
"hash_cont_tokens": "42a0363271aa0e45"
},
"truncated": 0,
"non_truncated": 245,
"padded": 972,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:sociology|0": {
"hashes": {
"hash_examples": "f6c9bc9d18c80870",
"hash_full_prompts": "f6c9bc9d18c80870",
"hash_input_tokens": "458d4766848b23c2",
"hash_cont_tokens": "6dcd97a97f690bc4"
},
"truncated": 0,
"non_truncated": 201,
"padded": 796,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:us_foreign_policy|0": {
"hashes": {
"hash_examples": "ed7b78629db6678f",
"hash_full_prompts": "ed7b78629db6678f",
"hash_input_tokens": "9dff9377e6bb7a8d",
"hash_cont_tokens": "77f9e45302a70de3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 392,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:virology|0": {
"hashes": {
"hash_examples": "bc52ffdc3f9b994a",
"hash_full_prompts": "bc52ffdc3f9b994a",
"hash_input_tokens": "74365771e0547aab",
"hash_cont_tokens": "324dfec5557d5fef"
},
"truncated": 0,
"non_truncated": 166,
"padded": 660,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_mc:world_religions|0": {
"hashes": {
"hash_examples": "ecdb4a4f94f62930",
"hash_full_prompts": "ecdb4a4f94f62930",
"hash_input_tokens": "4b7921c3a1f2c272",
"hash_cont_tokens": "488549eb5937c325"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|mmlu_pro_cloze|0": {
"hashes": {
"hash_examples": "845e15cfeee1fc11",
"hash_full_prompts": "845e15cfeee1fc11",
"hash_input_tokens": "d97fad769396572c",
"hash_cont_tokens": "cbb2b1dfdba77cc7"
},
"truncated": 0,
"non_truncated": 12032,
"padded": 111203,
"non_padded": 2792,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|openbookqa|0": {
"hashes": {
"hash_examples": "fd427af2ef0577e3",
"hash_full_prompts": "fd427af2ef0577e3",
"hash_input_tokens": "1c38e5461a17ffed",
"hash_cont_tokens": "39a126805e6138fc"
},
"truncated": 0,
"non_truncated": 500,
"padded": 1994,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|piqa|0": {
"hashes": {
"hash_examples": "f7e288a8894cd149",
"hash_full_prompts": "f7e288a8894cd149",
"hash_input_tokens": "17bff461520b9253",
"hash_cont_tokens": "bb302a379c07dc69"
},
"truncated": 0,
"non_truncated": 1838,
"padded": 3595,
"non_padded": 81,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|siqa|0": {
"hashes": {
"hash_examples": "c62abc8ecbd49cc4",
"hash_full_prompts": "c62abc8ecbd49cc4",
"hash_input_tokens": "623f4a3d34d19fac",
"hash_cont_tokens": "3ef06bdd7b8ff7a0"
},
"truncated": 0,
"non_truncated": 1954,
"padded": 5777,
"non_padded": 85,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|winogrande|0": {
"hashes": {
"hash_examples": "087d5d1a1afd4c7b",
"hash_full_prompts": "087d5d1a1afd4c7b",
"hash_input_tokens": "2073786b6c0bdc78",
"hash_cont_tokens": "f5eb053d43f78da1"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2531,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"custom|gsm8k|5": {
"hashes": {
"hash_examples": "0ed016e24e7512fd",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "fc45e01856de5296",
"hash_cont_tokens": "316a05937b0f88f2"
},
"truncated": 1319,
"non_truncated": 0,
"padded": 955,
"non_padded": 364,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"custom|trivia_qa|0": {
"hashes": {
"hash_examples": "1e083041cb75ff0c",
"hash_full_prompts": "1e083041cb75ff0c",
"hash_input_tokens": "c9340e17ef103ae3",
"hash_cont_tokens": "5cce0ca4351baf81"
},
"truncated": 17944,
"non_truncated": 0,
"padded": 283,
"non_padded": 17661,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "88e809e91c59a89c",
"hash_full_prompts": "60e948f5cd4b4b2e",
"hash_input_tokens": "2817d822d799cf5b",
"hash_cont_tokens": "86bbe5ffe0d31a92"
},
"truncated": 19263,
"non_truncated": 60486,
"padded": 298164,
"non_padded": 21963,
"num_truncated_few_shots": 0
}
}