|
{ |
|
"results": { |
|
"arc_challenge": { |
|
"alias": "arc_challenge", |
|
"acc,none": 0.2167235494880546, |
|
"acc_stderr,none": 0.01204015671348119, |
|
"acc_norm,none": 0.24658703071672355, |
|
"acc_norm_stderr,none": 0.012595726268790115 |
|
}, |
|
"arc_easy": { |
|
"alias": "arc_easy", |
|
"acc,none": 0.27314814814814814, |
|
"acc_stderr,none": 0.009143032718360342, |
|
"acc_norm,none": 0.2521043771043771, |
|
"acc_norm_stderr,none": 0.008910024163218178 |
|
}, |
|
"blimp": { |
|
"acc,none": 0.5264925373134327, |
|
"acc_stderr,none": 0.0018733276049717608, |
|
"alias": "blimp" |
|
}, |
|
"blimp_adjunct_island": { |
|
"alias": " - blimp_adjunct_island", |
|
"acc,none": 0.535, |
|
"acc_stderr,none": 0.015780495050030156 |
|
}, |
|
"blimp_anaphor_gender_agreement": { |
|
"alias": " - blimp_anaphor_gender_agreement", |
|
"acc,none": 0.616, |
|
"acc_stderr,none": 0.015387682761897071 |
|
}, |
|
"blimp_anaphor_number_agreement": { |
|
"alias": " - blimp_anaphor_number_agreement", |
|
"acc,none": 0.563, |
|
"acc_stderr,none": 0.015693223928730373 |
|
}, |
|
"blimp_animate_subject_passive": { |
|
"alias": " - blimp_animate_subject_passive", |
|
"acc,none": 0.606, |
|
"acc_stderr,none": 0.015459721957493377 |
|
}, |
|
"blimp_animate_subject_trans": { |
|
"alias": " - blimp_animate_subject_trans", |
|
"acc,none": 0.802, |
|
"acc_stderr,none": 0.0126077339341753 |
|
}, |
|
"blimp_causative": { |
|
"alias": " - blimp_causative", |
|
"acc,none": 0.395, |
|
"acc_stderr,none": 0.015466551464829345 |
|
}, |
|
"blimp_complex_NP_island": { |
|
"alias": " - blimp_complex_NP_island", |
|
"acc,none": 0.472, |
|
"acc_stderr,none": 0.015794475789511476 |
|
}, |
|
"blimp_coordinate_structure_constraint_complex_left_branch": { |
|
"alias": " - blimp_coordinate_structure_constraint_complex_left_branch", |
|
"acc,none": 0.526, |
|
"acc_stderr,none": 0.015797897758042766 |
|
}, |
|
"blimp_coordinate_structure_constraint_object_extraction": { |
|
"alias": " - blimp_coordinate_structure_constraint_object_extraction", |
|
"acc,none": 0.631, |
|
"acc_stderr,none": 0.015266698139154617 |
|
}, |
|
"blimp_determiner_noun_agreement_1": { |
|
"alias": " - blimp_determiner_noun_agreement_1", |
|
"acc,none": 0.51, |
|
"acc_stderr,none": 0.0158161357527732 |
|
}, |
|
"blimp_determiner_noun_agreement_2": { |
|
"alias": " - blimp_determiner_noun_agreement_2", |
|
"acc,none": 0.516, |
|
"acc_stderr,none": 0.015811198373114878 |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_1": { |
|
"alias": " - blimp_determiner_noun_agreement_irregular_1", |
|
"acc,none": 0.494, |
|
"acc_stderr,none": 0.015818160898606715 |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_2": { |
|
"alias": " - blimp_determiner_noun_agreement_irregular_2", |
|
"acc,none": 0.487, |
|
"acc_stderr,none": 0.015813952101896626 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_2": { |
|
"alias": " - blimp_determiner_noun_agreement_with_adj_2", |
|
"acc,none": 0.509, |
|
"acc_stderr,none": 0.015816736995005392 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_1": { |
|
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1", |
|
"acc,none": 0.491, |
|
"acc_stderr,none": 0.015816736995005392 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_2": { |
|
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2", |
|
"acc,none": 0.495, |
|
"acc_stderr,none": 0.01581850894443666 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adjective_1": { |
|
"alias": " - blimp_determiner_noun_agreement_with_adjective_1", |
|
"acc,none": 0.51, |
|
"acc_stderr,none": 0.0158161357527732 |
|
}, |
|
"blimp_distractor_agreement_relational_noun": { |
|
"alias": " - blimp_distractor_agreement_relational_noun", |
|
"acc,none": 0.511, |
|
"acc_stderr,none": 0.015815471195292682 |
|
}, |
|
"blimp_distractor_agreement_relative_clause": { |
|
"alias": " - blimp_distractor_agreement_relative_clause", |
|
"acc,none": 0.496, |
|
"acc_stderr,none": 0.015818793703510886 |
|
}, |
|
"blimp_drop_argument": { |
|
"alias": " - blimp_drop_argument", |
|
"acc,none": 0.663, |
|
"acc_stderr,none": 0.014955087918653607 |
|
}, |
|
"blimp_ellipsis_n_bar_1": { |
|
"alias": " - blimp_ellipsis_n_bar_1", |
|
"acc,none": 0.493, |
|
"acc_stderr,none": 0.01581774956184357 |
|
}, |
|
"blimp_ellipsis_n_bar_2": { |
|
"alias": " - blimp_ellipsis_n_bar_2", |
|
"acc,none": 0.327, |
|
"acc_stderr,none": 0.014842213153411245 |
|
}, |
|
"blimp_existential_there_object_raising": { |
|
"alias": " - blimp_existential_there_object_raising", |
|
"acc,none": 0.612, |
|
"acc_stderr,none": 0.015417317979911077 |
|
}, |
|
"blimp_existential_there_quantifiers_1": { |
|
"alias": " - blimp_existential_there_quantifiers_1", |
|
"acc,none": 0.908, |
|
"acc_stderr,none": 0.00914437639315112 |
|
}, |
|
"blimp_existential_there_quantifiers_2": { |
|
"alias": " - blimp_existential_there_quantifiers_2", |
|
"acc,none": 0.611, |
|
"acc_stderr,none": 0.015424555647308495 |
|
}, |
|
"blimp_existential_there_subject_raising": { |
|
"alias": " - blimp_existential_there_subject_raising", |
|
"acc,none": 0.533, |
|
"acc_stderr,none": 0.01578480789113878 |
|
}, |
|
"blimp_expletive_it_object_raising": { |
|
"alias": " - blimp_expletive_it_object_raising", |
|
"acc,none": 0.569, |
|
"acc_stderr,none": 0.015667944488173494 |
|
}, |
|
"blimp_inchoative": { |
|
"alias": " - blimp_inchoative", |
|
"acc,none": 0.388, |
|
"acc_stderr,none": 0.015417317979911081 |
|
}, |
|
"blimp_intransitive": { |
|
"alias": " - blimp_intransitive", |
|
"acc,none": 0.557, |
|
"acc_stderr,none": 0.015716169953204105 |
|
}, |
|
"blimp_irregular_past_participle_adjectives": { |
|
"alias": " - blimp_irregular_past_participle_adjectives", |
|
"acc,none": 0.294, |
|
"acc_stderr,none": 0.01441429054000822 |
|
}, |
|
"blimp_irregular_past_participle_verbs": { |
|
"alias": " - blimp_irregular_past_participle_verbs", |
|
"acc,none": 0.464, |
|
"acc_stderr,none": 0.015778243024904586 |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_1": { |
|
"alias": " - blimp_irregular_plural_subject_verb_agreement_1", |
|
"acc,none": 0.495, |
|
"acc_stderr,none": 0.015818508944436652 |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_2": { |
|
"alias": " - blimp_irregular_plural_subject_verb_agreement_2", |
|
"acc,none": 0.525, |
|
"acc_stderr,none": 0.015799513429996026 |
|
}, |
|
"blimp_left_branch_island_echo_question": { |
|
"alias": " - blimp_left_branch_island_echo_question", |
|
"acc,none": 0.597, |
|
"acc_stderr,none": 0.015518757419066533 |
|
}, |
|
"blimp_left_branch_island_simple_question": { |
|
"alias": " - blimp_left_branch_island_simple_question", |
|
"acc,none": 0.511, |
|
"acc_stderr,none": 0.01581547119529269 |
|
}, |
|
"blimp_matrix_question_npi_licensor_present": { |
|
"alias": " - blimp_matrix_question_npi_licensor_present", |
|
"acc,none": 0.352, |
|
"acc_stderr,none": 0.015110404505648675 |
|
}, |
|
"blimp_npi_present_1": { |
|
"alias": " - blimp_npi_present_1", |
|
"acc,none": 0.402, |
|
"acc_stderr,none": 0.015512467135715077 |
|
}, |
|
"blimp_npi_present_2": { |
|
"alias": " - blimp_npi_present_2", |
|
"acc,none": 0.335, |
|
"acc_stderr,none": 0.014933117490932577 |
|
}, |
|
"blimp_only_npi_licensor_present": { |
|
"alias": " - blimp_only_npi_licensor_present", |
|
"acc,none": 0.377, |
|
"acc_stderr,none": 0.015333170125779855 |
|
}, |
|
"blimp_only_npi_scope": { |
|
"alias": " - blimp_only_npi_scope", |
|
"acc,none": 0.599, |
|
"acc_stderr,none": 0.015506109745498322 |
|
}, |
|
"blimp_passive_1": { |
|
"alias": " - blimp_passive_1", |
|
"acc,none": 0.661, |
|
"acc_stderr,none": 0.014976758771620345 |
|
}, |
|
"blimp_passive_2": { |
|
"alias": " - blimp_passive_2", |
|
"acc,none": 0.602, |
|
"acc_stderr,none": 0.01548663410285892 |
|
}, |
|
"blimp_principle_A_c_command": { |
|
"alias": " - blimp_principle_A_c_command", |
|
"acc,none": 0.318, |
|
"acc_stderr,none": 0.014734079309311903 |
|
}, |
|
"blimp_principle_A_case_1": { |
|
"alias": " - blimp_principle_A_case_1", |
|
"acc,none": 0.845, |
|
"acc_stderr,none": 0.01145015747079947 |
|
}, |
|
"blimp_principle_A_case_2": { |
|
"alias": " - blimp_principle_A_case_2", |
|
"acc,none": 0.5, |
|
"acc_stderr,none": 0.015819299929208316 |
|
}, |
|
"blimp_principle_A_domain_1": { |
|
"alias": " - blimp_principle_A_domain_1", |
|
"acc,none": 0.526, |
|
"acc_stderr,none": 0.01579789775804274 |
|
}, |
|
"blimp_principle_A_domain_2": { |
|
"alias": " - blimp_principle_A_domain_2", |
|
"acc,none": 0.512, |
|
"acc_stderr,none": 0.015814743314581818 |
|
}, |
|
"blimp_principle_A_domain_3": { |
|
"alias": " - blimp_principle_A_domain_3", |
|
"acc,none": 0.509, |
|
"acc_stderr,none": 0.015816736995005392 |
|
}, |
|
"blimp_principle_A_reconstruction": { |
|
"alias": " - blimp_principle_A_reconstruction", |
|
"acc,none": 0.448, |
|
"acc_stderr,none": 0.015733516566347833 |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_1": { |
|
"alias": " - blimp_regular_plural_subject_verb_agreement_1", |
|
"acc,none": 0.387, |
|
"acc_stderr,none": 0.015410011955493933 |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_2": { |
|
"alias": " - blimp_regular_plural_subject_verb_agreement_2", |
|
"acc,none": 0.514, |
|
"acc_stderr,none": 0.01581309754773099 |
|
}, |
|
"blimp_sentential_negation_npi_licensor_present": { |
|
"alias": " - blimp_sentential_negation_npi_licensor_present", |
|
"acc,none": 0.642, |
|
"acc_stderr,none": 0.01516792886540756 |
|
}, |
|
"blimp_sentential_negation_npi_scope": { |
|
"alias": " - blimp_sentential_negation_npi_scope", |
|
"acc,none": 0.724, |
|
"acc_stderr,none": 0.014142984975740673 |
|
}, |
|
"blimp_sentential_subject_island": { |
|
"alias": " - blimp_sentential_subject_island", |
|
"acc,none": 0.461, |
|
"acc_stderr,none": 0.01577110420128319 |
|
}, |
|
"blimp_superlative_quantifiers_1": { |
|
"alias": " - blimp_superlative_quantifiers_1", |
|
"acc,none": 0.781, |
|
"acc_stderr,none": 0.013084731950262024 |
|
}, |
|
"blimp_superlative_quantifiers_2": { |
|
"alias": " - blimp_superlative_quantifiers_2", |
|
"acc,none": 0.62, |
|
"acc_stderr,none": 0.015356947477797585 |
|
}, |
|
"blimp_tough_vs_raising_1": { |
|
"alias": " - blimp_tough_vs_raising_1", |
|
"acc,none": 0.413, |
|
"acc_stderr,none": 0.01557798682993653 |
|
}, |
|
"blimp_tough_vs_raising_2": { |
|
"alias": " - blimp_tough_vs_raising_2", |
|
"acc,none": 0.614, |
|
"acc_stderr,none": 0.01540263747678438 |
|
}, |
|
"blimp_transitive": { |
|
"alias": " - blimp_transitive", |
|
"acc,none": 0.518, |
|
"acc_stderr,none": 0.015809045699406728 |
|
}, |
|
"blimp_wh_island": { |
|
"alias": " - blimp_wh_island", |
|
"acc,none": 0.613, |
|
"acc_stderr,none": 0.015410011955493932 |
|
}, |
|
"blimp_wh_questions_object_gap": { |
|
"alias": " - blimp_wh_questions_object_gap", |
|
"acc,none": 0.484, |
|
"acc_stderr,none": 0.015811198373114878 |
|
}, |
|
"blimp_wh_questions_subject_gap": { |
|
"alias": " - blimp_wh_questions_subject_gap", |
|
"acc,none": 0.44, |
|
"acc_stderr,none": 0.015704987954361798 |
|
}, |
|
"blimp_wh_questions_subject_gap_long_distance": { |
|
"alias": " - blimp_wh_questions_subject_gap_long_distance", |
|
"acc,none": 0.407, |
|
"acc_stderr,none": 0.015543249100255542 |
|
}, |
|
"blimp_wh_vs_that_no_gap": { |
|
"alias": " - blimp_wh_vs_that_no_gap", |
|
"acc,none": 0.375, |
|
"acc_stderr,none": 0.015316971293620996 |
|
}, |
|
"blimp_wh_vs_that_no_gap_long_distance": { |
|
"alias": " - blimp_wh_vs_that_no_gap_long_distance", |
|
"acc,none": 0.418, |
|
"acc_stderr,none": 0.01560511196754195 |
|
}, |
|
"blimp_wh_vs_that_with_gap": { |
|
"alias": " - blimp_wh_vs_that_with_gap", |
|
"acc,none": 0.604, |
|
"acc_stderr,none": 0.015473313265859406 |
|
}, |
|
"blimp_wh_vs_that_with_gap_long_distance": { |
|
"alias": " - blimp_wh_vs_that_with_gap_long_distance", |
|
"acc,none": 0.562, |
|
"acc_stderr,none": 0.01569721001969469 |
|
}, |
|
"lambada_openai": { |
|
"alias": "lambada_openai", |
|
"perplexity,none": 3684040.8180097938, |
|
"perplexity_stderr,none": 360795.68723754253, |
|
"acc,none": 0.0, |
|
"acc_stderr,none": 0.0 |
|
}, |
|
"logiqa": { |
|
"alias": "logiqa", |
|
"acc,none": 0.22734254992319508, |
|
"acc_stderr,none": 0.016439067675117738, |
|
"acc_norm,none": 0.2457757296466974, |
|
"acc_norm_stderr,none": 0.016887410894296923 |
|
}, |
|
"mmlu": { |
|
"acc,none": 0.2469733656174334, |
|
"acc_stderr,none": 0.0036372949607496293, |
|
"alias": "mmlu" |
|
}, |
|
"mmlu_humanities": { |
|
"acc,none": 0.24484590860786398, |
|
"acc_stderr,none": 0.006272498497245472, |
|
"alias": " - humanities" |
|
}, |
|
"mmlu_formal_logic": { |
|
"alias": " - formal_logic", |
|
"acc,none": 0.2619047619047619, |
|
"acc_stderr,none": 0.0393253768039287 |
|
}, |
|
"mmlu_high_school_european_history": { |
|
"alias": " - high_school_european_history", |
|
"acc,none": 0.2545454545454545, |
|
"acc_stderr,none": 0.034015067152490405 |
|
}, |
|
"mmlu_high_school_us_history": { |
|
"alias": " - high_school_us_history", |
|
"acc,none": 0.23039215686274508, |
|
"acc_stderr,none": 0.029554292605695066 |
|
}, |
|
"mmlu_high_school_world_history": { |
|
"alias": " - high_school_world_history", |
|
"acc,none": 0.25738396624472576, |
|
"acc_stderr,none": 0.02845882099146031 |
|
}, |
|
"mmlu_international_law": { |
|
"alias": " - international_law", |
|
"acc,none": 0.2809917355371901, |
|
"acc_stderr,none": 0.04103203830514512 |
|
}, |
|
"mmlu_jurisprudence": { |
|
"alias": " - jurisprudence", |
|
"acc,none": 0.28703703703703703, |
|
"acc_stderr,none": 0.043733130409147614 |
|
}, |
|
"mmlu_logical_fallacies": { |
|
"alias": " - logical_fallacies", |
|
"acc,none": 0.24539877300613497, |
|
"acc_stderr,none": 0.03380939813943354 |
|
}, |
|
"mmlu_moral_disputes": { |
|
"alias": " - moral_disputes", |
|
"acc,none": 0.2630057803468208, |
|
"acc_stderr,none": 0.023703099525258182 |
|
}, |
|
"mmlu_moral_scenarios": { |
|
"alias": " - moral_scenarios", |
|
"acc,none": 0.2446927374301676, |
|
"acc_stderr,none": 0.014378169884098417 |
|
}, |
|
"mmlu_philosophy": { |
|
"alias": " - philosophy", |
|
"acc,none": 0.2765273311897106, |
|
"acc_stderr,none": 0.02540383297817961 |
|
}, |
|
"mmlu_prehistory": { |
|
"alias": " - prehistory", |
|
"acc,none": 0.24074074074074073, |
|
"acc_stderr,none": 0.02378858355165854 |
|
}, |
|
"mmlu_professional_law": { |
|
"alias": " - professional_law", |
|
"acc,none": 0.227509778357236, |
|
"acc_stderr,none": 0.01070718857686424 |
|
}, |
|
"mmlu_world_religions": { |
|
"alias": " - world_religions", |
|
"acc,none": 0.23976608187134502, |
|
"acc_stderr,none": 0.03274485211946956 |
|
}, |
|
"mmlu_other": { |
|
"acc,none": 0.25458641776633406, |
|
"acc_stderr,none": 0.007787044164107081, |
|
"alias": " - other" |
|
}, |
|
"mmlu_business_ethics": { |
|
"alias": " - business_ethics", |
|
"acc,none": 0.24, |
|
"acc_stderr,none": 0.04292346959909284 |
|
}, |
|
"mmlu_clinical_knowledge": { |
|
"alias": " - clinical_knowledge", |
|
"acc,none": 0.17358490566037735, |
|
"acc_stderr,none": 0.02331058302600627 |
|
}, |
|
"mmlu_college_medicine": { |
|
"alias": " - college_medicine", |
|
"acc,none": 0.20809248554913296, |
|
"acc_stderr,none": 0.030952890217749895 |
|
}, |
|
"mmlu_global_facts": { |
|
"alias": " - global_facts", |
|
"acc,none": 0.27, |
|
"acc_stderr,none": 0.044619604333847394 |
|
}, |
|
"mmlu_human_aging": { |
|
"alias": " - human_aging", |
|
"acc,none": 0.35874439461883406, |
|
"acc_stderr,none": 0.03219079200419996 |
|
}, |
|
"mmlu_management": { |
|
"alias": " - management", |
|
"acc,none": 0.18446601941747573, |
|
"acc_stderr,none": 0.03840423627288276 |
|
}, |
|
"mmlu_marketing": { |
|
"alias": " - marketing", |
|
"acc,none": 0.24786324786324787, |
|
"acc_stderr,none": 0.028286324075564372 |
|
}, |
|
"mmlu_medical_genetics": { |
|
"alias": " - medical_genetics", |
|
"acc,none": 0.22, |
|
"acc_stderr,none": 0.04163331998932269 |
|
}, |
|
"mmlu_miscellaneous": { |
|
"alias": " - miscellaneous", |
|
"acc,none": 0.28735632183908044, |
|
"acc_stderr,none": 0.0161824107306827 |
|
}, |
|
"mmlu_nutrition": { |
|
"alias": " - nutrition", |
|
"acc,none": 0.23202614379084968, |
|
"acc_stderr,none": 0.024170840879341016 |
|
}, |
|
"mmlu_professional_accounting": { |
|
"alias": " - professional_accounting", |
|
"acc,none": 0.2730496453900709, |
|
"acc_stderr,none": 0.026577860943307857 |
|
}, |
|
"mmlu_professional_medicine": { |
|
"alias": " - professional_medicine", |
|
"acc,none": 0.22058823529411764, |
|
"acc_stderr,none": 0.025187786660227245 |
|
}, |
|
"mmlu_virology": { |
|
"alias": " - virology", |
|
"acc,none": 0.27710843373493976, |
|
"acc_stderr,none": 0.03484331592680588 |
|
}, |
|
"mmlu_social_sciences": { |
|
"acc,none": 0.23691907702307444, |
|
"acc_stderr,none": 0.007668080552192554, |
|
"alias": " - social sciences" |
|
}, |
|
"mmlu_econometrics": { |
|
"alias": " - econometrics", |
|
"acc,none": 0.21929824561403508, |
|
"acc_stderr,none": 0.03892431106518753 |
|
}, |
|
"mmlu_high_school_geography": { |
|
"alias": " - high_school_geography", |
|
"acc,none": 0.21212121212121213, |
|
"acc_stderr,none": 0.029126522834586815 |
|
}, |
|
"mmlu_high_school_government_and_politics": { |
|
"alias": " - high_school_government_and_politics", |
|
"acc,none": 0.20725388601036268, |
|
"acc_stderr,none": 0.029252823291803624 |
|
}, |
|
"mmlu_high_school_macroeconomics": { |
|
"alias": " - high_school_macroeconomics", |
|
"acc,none": 0.23333333333333334, |
|
"acc_stderr,none": 0.02144454730156048 |
|
}, |
|
"mmlu_high_school_microeconomics": { |
|
"alias": " - high_school_microeconomics", |
|
"acc,none": 0.23949579831932774, |
|
"acc_stderr,none": 0.027722065493361252 |
|
}, |
|
"mmlu_high_school_psychology": { |
|
"alias": " - high_school_psychology", |
|
"acc,none": 0.25137614678899084, |
|
"acc_stderr,none": 0.018599206360287415 |
|
}, |
|
"mmlu_human_sexuality": { |
|
"alias": " - human_sexuality", |
|
"acc,none": 0.22900763358778625, |
|
"acc_stderr,none": 0.036853466317118506 |
|
}, |
|
"mmlu_professional_psychology": { |
|
"alias": " - professional_psychology", |
|
"acc,none": 0.25980392156862747, |
|
"acc_stderr,none": 0.01774089950917779 |
|
}, |
|
"mmlu_public_relations": { |
|
"alias": " - public_relations", |
|
"acc,none": 0.2636363636363636, |
|
"acc_stderr,none": 0.04220224692971987 |
|
}, |
|
"mmlu_security_studies": { |
|
"alias": " - security_studies", |
|
"acc,none": 0.1836734693877551, |
|
"acc_stderr,none": 0.024789071332007636 |
|
}, |
|
"mmlu_sociology": { |
|
"alias": " - sociology", |
|
"acc,none": 0.22885572139303484, |
|
"acc_stderr,none": 0.029705284056772443 |
|
}, |
|
"mmlu_us_foreign_policy": { |
|
"alias": " - us_foreign_policy", |
|
"acc,none": 0.28, |
|
"acc_stderr,none": 0.045126085985421255 |
|
}, |
|
"mmlu_stem": { |
|
"acc,none": 0.2524579765302886, |
|
"acc_stderr,none": 0.007740429774144842, |
|
"alias": " - stem" |
|
}, |
|
"mmlu_abstract_algebra": { |
|
"alias": " - abstract_algebra", |
|
"acc,none": 0.32, |
|
"acc_stderr,none": 0.04688261722621504 |
|
}, |
|
"mmlu_anatomy": { |
|
"alias": " - anatomy", |
|
"acc,none": 0.28888888888888886, |
|
"acc_stderr,none": 0.0391545063041425 |
|
}, |
|
"mmlu_astronomy": { |
|
"alias": " - astronomy", |
|
"acc,none": 0.23026315789473684, |
|
"acc_stderr,none": 0.034260594244031654 |
|
}, |
|
"mmlu_college_biology": { |
|
"alias": " - college_biology", |
|
"acc,none": 0.25, |
|
"acc_stderr,none": 0.03621034121889507 |
|
}, |
|
"mmlu_college_chemistry": { |
|
"alias": " - college_chemistry", |
|
"acc,none": 0.26, |
|
"acc_stderr,none": 0.0440844002276808 |
|
}, |
|
"mmlu_college_computer_science": { |
|
"alias": " - college_computer_science", |
|
"acc,none": 0.18, |
|
"acc_stderr,none": 0.03861229196653695 |
|
}, |
|
"mmlu_college_mathematics": { |
|
"alias": " - college_mathematics", |
|
"acc,none": 0.24, |
|
"acc_stderr,none": 0.04292346959909281 |
|
}, |
|
"mmlu_college_physics": { |
|
"alias": " - college_physics", |
|
"acc,none": 0.28431372549019607, |
|
"acc_stderr,none": 0.04488482852329017 |
|
}, |
|
"mmlu_computer_security": { |
|
"alias": " - computer_security", |
|
"acc,none": 0.25, |
|
"acc_stderr,none": 0.04351941398892446 |
|
}, |
|
"mmlu_conceptual_physics": { |
|
"alias": " - conceptual_physics", |
|
"acc,none": 0.2765957446808511, |
|
"acc_stderr,none": 0.02924188386962882 |
|
}, |
|
"mmlu_electrical_engineering": { |
|
"alias": " - electrical_engineering", |
|
"acc,none": 0.23448275862068965, |
|
"acc_stderr,none": 0.035306258743465914 |
|
}, |
|
"mmlu_elementary_mathematics": { |
|
"alias": " - elementary_mathematics", |
|
"acc,none": 0.2751322751322751, |
|
"acc_stderr,none": 0.023000086859068646 |
|
}, |
|
"mmlu_high_school_biology": { |
|
"alias": " - high_school_biology", |
|
"acc,none": 0.25161290322580643, |
|
"acc_stderr,none": 0.024685979286239956 |
|
}, |
|
"mmlu_high_school_chemistry": { |
|
"alias": " - high_school_chemistry", |
|
"acc,none": 0.2660098522167488, |
|
"acc_stderr,none": 0.03108982600293752 |
|
}, |
|
"mmlu_high_school_computer_science": { |
|
"alias": " - high_school_computer_science", |
|
"acc,none": 0.27, |
|
"acc_stderr,none": 0.0446196043338474 |
|
}, |
|
"mmlu_high_school_mathematics": { |
|
"alias": " - high_school_mathematics", |
|
"acc,none": 0.26666666666666666, |
|
"acc_stderr,none": 0.026962424325073828 |
|
}, |
|
"mmlu_high_school_physics": { |
|
"alias": " - high_school_physics", |
|
"acc,none": 0.2119205298013245, |
|
"acc_stderr,none": 0.03336767086567977 |
|
}, |
|
"mmlu_high_school_statistics": { |
|
"alias": " - high_school_statistics", |
|
"acc,none": 0.19444444444444445, |
|
"acc_stderr,none": 0.026991454502036716 |
|
}, |
|
"mmlu_machine_learning": { |
|
"alias": " - machine_learning", |
|
"acc,none": 0.21428571428571427, |
|
"acc_stderr,none": 0.038946411200447915 |
|
}, |
|
"piqa": { |
|
"alias": "piqa", |
|
"acc,none": 0.5239390642002176, |
|
"acc_stderr,none": 0.011652445621079262, |
|
"acc_norm,none": 0.5195865070729053, |
|
"acc_norm_stderr,none": 0.011656869979288456 |
|
}, |
|
"sciq": { |
|
"alias": "sciq", |
|
"acc,none": 0.194, |
|
"acc_stderr,none": 0.01251081614126436, |
|
"acc_norm,none": 0.215, |
|
"acc_norm_stderr,none": 0.012997843819031834 |
|
}, |
|
"wikitext": { |
|
"alias": "wikitext", |
|
"word_perplexity,none": 327214.2701179993, |
|
"word_perplexity_stderr,none": "N/A", |
|
"byte_perplexity,none": 10.747345119448008, |
|
"byte_perplexity_stderr,none": "N/A", |
|
"bits_per_byte,none": 3.425908414604779, |
|
"bits_per_byte_stderr,none": "N/A" |
|
}, |
|
"winogrande": { |
|
"alias": "winogrande", |
|
"acc,none": 0.4980268350434096, |
|
"acc_stderr,none": 0.014052376259225629 |
|
}, |
|
"wsc": { |
|
"alias": "wsc", |
|
"acc,none": 0.5865384615384616, |
|
"acc_stderr,none": 0.04852294969729053 |
|
} |
|
}, |
|
"groups": { |
|
"blimp": { |
|
"acc,none": 0.5264925373134327, |
|
"acc_stderr,none": 0.0018733276049717608, |
|
"alias": "blimp" |
|
}, |
|
"mmlu": { |
|
"acc,none": 0.2469733656174334, |
|
"acc_stderr,none": 0.0036372949607496293, |
|
"alias": "mmlu" |
|
}, |
|
"mmlu_humanities": { |
|
"acc,none": 0.24484590860786398, |
|
"acc_stderr,none": 0.006272498497245472, |
|
"alias": " - humanities" |
|
}, |
|
"mmlu_other": { |
|
"acc,none": 0.25458641776633406, |
|
"acc_stderr,none": 0.007787044164107081, |
|
"alias": " - other" |
|
}, |
|
"mmlu_social_sciences": { |
|
"acc,none": 0.23691907702307444, |
|
"acc_stderr,none": 0.007668080552192554, |
|
"alias": " - social sciences" |
|
}, |
|
"mmlu_stem": { |
|
"acc,none": 0.2524579765302886, |
|
"acc_stderr,none": 0.007740429774144842, |
|
"alias": " - stem" |
|
} |
|
}, |
|
"group_subtasks": { |
|
"arc_easy": [], |
|
"arc_challenge": [], |
|
"blimp": [ |
|
"blimp_adjunct_island", |
|
"blimp_anaphor_gender_agreement", |
|
"blimp_anaphor_number_agreement", |
|
"blimp_animate_subject_passive", |
|
"blimp_animate_subject_trans", |
|
"blimp_causative", |
|
"blimp_complex_NP_island", |
|
"blimp_coordinate_structure_constraint_complex_left_branch", |
|
"blimp_coordinate_structure_constraint_object_extraction", |
|
"blimp_determiner_noun_agreement_1", |
|
"blimp_determiner_noun_agreement_2", |
|
"blimp_determiner_noun_agreement_irregular_1", |
|
"blimp_determiner_noun_agreement_irregular_2", |
|
"blimp_determiner_noun_agreement_with_adj_2", |
|
"blimp_determiner_noun_agreement_with_adj_irregular_1", |
|
"blimp_determiner_noun_agreement_with_adj_irregular_2", |
|
"blimp_determiner_noun_agreement_with_adjective_1", |
|
"blimp_distractor_agreement_relational_noun", |
|
"blimp_distractor_agreement_relative_clause", |
|
"blimp_drop_argument", |
|
"blimp_ellipsis_n_bar_1", |
|
"blimp_ellipsis_n_bar_2", |
|
"blimp_existential_there_object_raising", |
|
"blimp_existential_there_quantifiers_1", |
|
"blimp_existential_there_quantifiers_2", |
|
"blimp_existential_there_subject_raising", |
|
"blimp_expletive_it_object_raising", |
|
"blimp_inchoative", |
|
"blimp_intransitive", |
|
"blimp_irregular_past_participle_adjectives", |
|
"blimp_irregular_past_participle_verbs", |
|
"blimp_irregular_plural_subject_verb_agreement_1", |
|
"blimp_irregular_plural_subject_verb_agreement_2", |
|
"blimp_left_branch_island_echo_question", |
|
"blimp_left_branch_island_simple_question", |
|
"blimp_matrix_question_npi_licensor_present", |
|
"blimp_npi_present_1", |
|
"blimp_npi_present_2", |
|
"blimp_only_npi_licensor_present", |
|
"blimp_only_npi_scope", |
|
"blimp_passive_1", |
|
"blimp_passive_2", |
|
"blimp_principle_A_c_command", |
|
"blimp_principle_A_case_1", |
|
"blimp_principle_A_case_2", |
|
"blimp_principle_A_domain_1", |
|
"blimp_principle_A_domain_2", |
|
"blimp_principle_A_domain_3", |
|
"blimp_principle_A_reconstruction", |
|
"blimp_regular_plural_subject_verb_agreement_1", |
|
"blimp_regular_plural_subject_verb_agreement_2", |
|
"blimp_sentential_negation_npi_licensor_present", |
|
"blimp_sentential_negation_npi_scope", |
|
"blimp_sentential_subject_island", |
|
"blimp_superlative_quantifiers_1", |
|
"blimp_superlative_quantifiers_2", |
|
"blimp_tough_vs_raising_1", |
|
"blimp_tough_vs_raising_2", |
|
"blimp_transitive", |
|
"blimp_wh_island", |
|
"blimp_wh_questions_object_gap", |
|
"blimp_wh_questions_subject_gap", |
|
"blimp_wh_questions_subject_gap_long_distance", |
|
"blimp_wh_vs_that_no_gap", |
|
"blimp_wh_vs_that_no_gap_long_distance", |
|
"blimp_wh_vs_that_with_gap", |
|
"blimp_wh_vs_that_with_gap_long_distance" |
|
], |
|
"lambada_openai": [], |
|
"logiqa": [], |
|
"mmlu_humanities": [ |
|
"mmlu_moral_disputes", |
|
"mmlu_high_school_world_history", |
|
"mmlu_jurisprudence", |
|
"mmlu_philosophy", |
|
"mmlu_high_school_us_history", |
|
"mmlu_professional_law", |
|
"mmlu_logical_fallacies", |
|
"mmlu_moral_scenarios", |
|
"mmlu_formal_logic", |
|
"mmlu_prehistory", |
|
"mmlu_high_school_european_history", |
|
"mmlu_world_religions", |
|
"mmlu_international_law" |
|
], |
|
"mmlu_social_sciences": [ |
|
"mmlu_us_foreign_policy", |
|
"mmlu_sociology", |
|
"mmlu_econometrics", |
|
"mmlu_security_studies", |
|
"mmlu_high_school_geography", |
|
"mmlu_public_relations", |
|
"mmlu_high_school_microeconomics", |
|
"mmlu_professional_psychology", |
|
"mmlu_high_school_macroeconomics", |
|
"mmlu_human_sexuality", |
|
"mmlu_high_school_government_and_politics", |
|
"mmlu_high_school_psychology" |
|
], |
|
"mmlu_other": [ |
|
"mmlu_college_medicine", |
|
"mmlu_medical_genetics", |
|
"mmlu_business_ethics", |
|
"mmlu_miscellaneous", |
|
"mmlu_nutrition", |
|
"mmlu_clinical_knowledge", |
|
"mmlu_human_aging", |
|
"mmlu_professional_accounting", |
|
"mmlu_marketing", |
|
"mmlu_global_facts", |
|
"mmlu_professional_medicine", |
|
"mmlu_virology", |
|
"mmlu_management" |
|
], |
|
"mmlu_stem": [ |
|
"mmlu_elementary_mathematics", |
|
"mmlu_electrical_engineering", |
|
"mmlu_high_school_computer_science", |
|
"mmlu_high_school_physics", |
|
"mmlu_college_mathematics", |
|
"mmlu_college_chemistry", |
|
"mmlu_machine_learning", |
|
"mmlu_high_school_mathematics", |
|
"mmlu_computer_security", |
|
"mmlu_conceptual_physics", |
|
"mmlu_high_school_statistics", |
|
"mmlu_high_school_biology", |
|
"mmlu_astronomy", |
|
"mmlu_college_computer_science", |
|
"mmlu_college_biology", |
|
"mmlu_college_physics", |
|
"mmlu_anatomy", |
|
"mmlu_high_school_chemistry", |
|
"mmlu_abstract_algebra" |
|
], |
|
"mmlu": [ |
|
"mmlu_stem", |
|
"mmlu_other", |
|
"mmlu_social_sciences", |
|
"mmlu_humanities" |
|
], |
|
"piqa": [], |
|
"sciq": [], |
|
"wikitext": [], |
|
"winogrande": [], |
|
"wsc": [] |
|
}, |
|
"configs": { |
|
"arc_challenge": { |
|
"task": "arc_challenge", |
|
"tag": [ |
|
"ai2_arc" |
|
], |
|
"dataset_path": "allenai/ai2_arc", |
|
"dataset_name": "ARC-Challenge", |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"test_split": "test", |
|
"doc_to_text": "Question: {{question}}\nAnswer:", |
|
"doc_to_target": "{{choices.label.index(answerKey)}}", |
|
"doc_to_choice": "{{choices.text}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "Question: {{question}}\nAnswer:", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"arc_easy": { |
|
"task": "arc_easy", |
|
"tag": [ |
|
"ai2_arc" |
|
], |
|
"dataset_path": "allenai/ai2_arc", |
|
"dataset_name": "ARC-Easy", |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"test_split": "test", |
|
"doc_to_text": "Question: {{question}}\nAnswer:", |
|
"doc_to_target": "{{choices.label.index(answerKey)}}", |
|
"doc_to_choice": "{{choices.text}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "Question: {{question}}\nAnswer:", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_adjunct_island": { |
|
"task": "blimp_adjunct_island", |
|
"dataset_path": "blimp", |
|
"dataset_name": "adjunct_island", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_anaphor_gender_agreement": { |
|
"task": "blimp_anaphor_gender_agreement", |
|
"dataset_path": "blimp", |
|
"dataset_name": "anaphor_gender_agreement", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_anaphor_number_agreement": { |
|
"task": "blimp_anaphor_number_agreement", |
|
"dataset_path": "blimp", |
|
"dataset_name": "anaphor_number_agreement", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_animate_subject_passive": { |
|
"task": "blimp_animate_subject_passive", |
|
"dataset_path": "blimp", |
|
"dataset_name": "animate_subject_passive", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_animate_subject_trans": { |
|
"task": "blimp_animate_subject_trans", |
|
"dataset_path": "blimp", |
|
"dataset_name": "animate_subject_trans", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_causative": { |
|
"task": "blimp_causative", |
|
"dataset_path": "blimp", |
|
"dataset_name": "causative", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_complex_NP_island": { |
|
"task": "blimp_complex_NP_island", |
|
"dataset_path": "blimp", |
|
"dataset_name": "complex_NP_island", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_coordinate_structure_constraint_complex_left_branch": { |
|
"task": "blimp_coordinate_structure_constraint_complex_left_branch", |
|
"dataset_path": "blimp", |
|
"dataset_name": "coordinate_structure_constraint_complex_left_branch", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_coordinate_structure_constraint_object_extraction": { |
|
"task": "blimp_coordinate_structure_constraint_object_extraction", |
|
"dataset_path": "blimp", |
|
"dataset_name": "coordinate_structure_constraint_object_extraction", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_1": { |
|
"task": "blimp_determiner_noun_agreement_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_2": { |
|
"task": "blimp_determiner_noun_agreement_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_1": { |
|
"task": "blimp_determiner_noun_agreement_irregular_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_irregular_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_2": { |
|
"task": "blimp_determiner_noun_agreement_irregular_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_irregular_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_2": { |
|
"task": "blimp_determiner_noun_agreement_with_adj_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_with_adj_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_1": { |
|
"task": "blimp_determiner_noun_agreement_with_adj_irregular_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_with_adj_irregular_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_2": { |
|
"task": "blimp_determiner_noun_agreement_with_adj_irregular_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_with_adj_irregular_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_determiner_noun_agreement_with_adjective_1": { |
|
"task": "blimp_determiner_noun_agreement_with_adjective_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "determiner_noun_agreement_with_adjective_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_distractor_agreement_relational_noun": { |
|
"task": "blimp_distractor_agreement_relational_noun", |
|
"dataset_path": "blimp", |
|
"dataset_name": "distractor_agreement_relational_noun", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_distractor_agreement_relative_clause": { |
|
"task": "blimp_distractor_agreement_relative_clause", |
|
"dataset_path": "blimp", |
|
"dataset_name": "distractor_agreement_relative_clause", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_drop_argument": { |
|
"task": "blimp_drop_argument", |
|
"dataset_path": "blimp", |
|
"dataset_name": "drop_argument", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_ellipsis_n_bar_1": { |
|
"task": "blimp_ellipsis_n_bar_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "ellipsis_n_bar_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_ellipsis_n_bar_2": { |
|
"task": "blimp_ellipsis_n_bar_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "ellipsis_n_bar_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_existential_there_object_raising": { |
|
"task": "blimp_existential_there_object_raising", |
|
"dataset_path": "blimp", |
|
"dataset_name": "existential_there_object_raising", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_existential_there_quantifiers_1": { |
|
"task": "blimp_existential_there_quantifiers_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "existential_there_quantifiers_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_existential_there_quantifiers_2": { |
|
"task": "blimp_existential_there_quantifiers_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "existential_there_quantifiers_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_existential_there_subject_raising": { |
|
"task": "blimp_existential_there_subject_raising", |
|
"dataset_path": "blimp", |
|
"dataset_name": "existential_there_subject_raising", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_expletive_it_object_raising": { |
|
"task": "blimp_expletive_it_object_raising", |
|
"dataset_path": "blimp", |
|
"dataset_name": "expletive_it_object_raising", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_inchoative": { |
|
"task": "blimp_inchoative", |
|
"dataset_path": "blimp", |
|
"dataset_name": "inchoative", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_intransitive": { |
|
"task": "blimp_intransitive", |
|
"dataset_path": "blimp", |
|
"dataset_name": "intransitive", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_irregular_past_participle_adjectives": { |
|
"task": "blimp_irregular_past_participle_adjectives", |
|
"dataset_path": "blimp", |
|
"dataset_name": "irregular_past_participle_adjectives", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_irregular_past_participle_verbs": { |
|
"task": "blimp_irregular_past_participle_verbs", |
|
"dataset_path": "blimp", |
|
"dataset_name": "irregular_past_participle_verbs", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_1": { |
|
"task": "blimp_irregular_plural_subject_verb_agreement_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "irregular_plural_subject_verb_agreement_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_2": { |
|
"task": "blimp_irregular_plural_subject_verb_agreement_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "irregular_plural_subject_verb_agreement_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_left_branch_island_echo_question": { |
|
"task": "blimp_left_branch_island_echo_question", |
|
"dataset_path": "blimp", |
|
"dataset_name": "left_branch_island_echo_question", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_left_branch_island_simple_question": { |
|
"task": "blimp_left_branch_island_simple_question", |
|
"dataset_path": "blimp", |
|
"dataset_name": "left_branch_island_simple_question", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_matrix_question_npi_licensor_present": { |
|
"task": "blimp_matrix_question_npi_licensor_present", |
|
"dataset_path": "blimp", |
|
"dataset_name": "matrix_question_npi_licensor_present", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_npi_present_1": { |
|
"task": "blimp_npi_present_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "npi_present_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_npi_present_2": { |
|
"task": "blimp_npi_present_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "npi_present_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_only_npi_licensor_present": { |
|
"task": "blimp_only_npi_licensor_present", |
|
"dataset_path": "blimp", |
|
"dataset_name": "only_npi_licensor_present", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_only_npi_scope": { |
|
"task": "blimp_only_npi_scope", |
|
"dataset_path": "blimp", |
|
"dataset_name": "only_npi_scope", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_passive_1": { |
|
"task": "blimp_passive_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "passive_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_passive_2": { |
|
"task": "blimp_passive_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "passive_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_principle_A_c_command": { |
|
"task": "blimp_principle_A_c_command", |
|
"dataset_path": "blimp", |
|
"dataset_name": "principle_A_c_command", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_principle_A_case_1": { |
|
"task": "blimp_principle_A_case_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "principle_A_case_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_principle_A_case_2": { |
|
"task": "blimp_principle_A_case_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "principle_A_case_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_principle_A_domain_1": { |
|
"task": "blimp_principle_A_domain_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "principle_A_domain_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_principle_A_domain_2": { |
|
"task": "blimp_principle_A_domain_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "principle_A_domain_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_principle_A_domain_3": { |
|
"task": "blimp_principle_A_domain_3", |
|
"dataset_path": "blimp", |
|
"dataset_name": "principle_A_domain_3", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_principle_A_reconstruction": { |
|
"task": "blimp_principle_A_reconstruction", |
|
"dataset_path": "blimp", |
|
"dataset_name": "principle_A_reconstruction", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_1": { |
|
"task": "blimp_regular_plural_subject_verb_agreement_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "regular_plural_subject_verb_agreement_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_2": { |
|
"task": "blimp_regular_plural_subject_verb_agreement_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "regular_plural_subject_verb_agreement_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_sentential_negation_npi_licensor_present": { |
|
"task": "blimp_sentential_negation_npi_licensor_present", |
|
"dataset_path": "blimp", |
|
"dataset_name": "sentential_negation_npi_licensor_present", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_sentential_negation_npi_scope": { |
|
"task": "blimp_sentential_negation_npi_scope", |
|
"dataset_path": "blimp", |
|
"dataset_name": "sentential_negation_npi_scope", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_sentential_subject_island": { |
|
"task": "blimp_sentential_subject_island", |
|
"dataset_path": "blimp", |
|
"dataset_name": "sentential_subject_island", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_superlative_quantifiers_1": { |
|
"task": "blimp_superlative_quantifiers_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "superlative_quantifiers_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_superlative_quantifiers_2": { |
|
"task": "blimp_superlative_quantifiers_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "superlative_quantifiers_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_tough_vs_raising_1": { |
|
"task": "blimp_tough_vs_raising_1", |
|
"dataset_path": "blimp", |
|
"dataset_name": "tough_vs_raising_1", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_tough_vs_raising_2": { |
|
"task": "blimp_tough_vs_raising_2", |
|
"dataset_path": "blimp", |
|
"dataset_name": "tough_vs_raising_2", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_transitive": { |
|
"task": "blimp_transitive", |
|
"dataset_path": "blimp", |
|
"dataset_name": "transitive", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_island": { |
|
"task": "blimp_wh_island", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_island", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_questions_object_gap": { |
|
"task": "blimp_wh_questions_object_gap", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_questions_object_gap", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_questions_subject_gap": { |
|
"task": "blimp_wh_questions_subject_gap", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_questions_subject_gap", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_questions_subject_gap_long_distance": { |
|
"task": "blimp_wh_questions_subject_gap_long_distance", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_questions_subject_gap_long_distance", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_vs_that_no_gap": { |
|
"task": "blimp_wh_vs_that_no_gap", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_vs_that_no_gap", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_vs_that_no_gap_long_distance": { |
|
"task": "blimp_wh_vs_that_no_gap_long_distance", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_vs_that_no_gap_long_distance", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_vs_that_with_gap": { |
|
"task": "blimp_wh_vs_that_with_gap", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_vs_that_with_gap", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"blimp_wh_vs_that_with_gap_long_distance": { |
|
"task": "blimp_wh_vs_that_with_gap_long_distance", |
|
"dataset_path": "blimp", |
|
"dataset_name": "wh_vs_that_with_gap_long_distance", |
|
"validation_split": "train", |
|
"doc_to_text": "", |
|
"doc_to_target": 0, |
|
"doc_to_choice": "{{[sentence_good, sentence_bad]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"lambada_openai": { |
|
"task": "lambada_openai", |
|
"tag": [ |
|
"lambada" |
|
], |
|
"dataset_path": "EleutherAI/lambada_openai", |
|
"dataset_name": "default", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}", |
|
"doc_to_target": "{{' '+text.split(' ')[-1]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "perplexity", |
|
"aggregation": "perplexity", |
|
"higher_is_better": false |
|
}, |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "loglikelihood", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{text}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"logiqa": { |
|
"task": "logiqa", |
|
"dataset_path": "EleutherAI/logiqa", |
|
"dataset_name": "logiqa", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"test_split": "test", |
|
"doc_to_text": "def doc_to_text(doc) -> str:\n \"\"\"\n Passage: <passage>\n Question: <question>\n Choices:\n A. <choice1>\n B. <choice2>\n C. <choice3>\n D. <choice4>\n Answer:\n \"\"\"\n choices = [\"a\", \"b\", \"c\", \"d\"]\n prompt = \"Passage: \" + doc[\"context\"] + \"\\n\"\n prompt += \"Question: \" + doc[\"question\"] + \"\\nChoices:\\n\"\n for choice, option in zip(choices, doc[\"options\"]):\n prompt += f\"{choice.upper()}. {option}\\n\"\n prompt += \"Answer:\"\n return prompt\n", |
|
"doc_to_target": "def doc_to_target(doc) -> int:\n choices = [\"a\", \"b\", \"c\", \"d\"]\n return choices.index(doc[\"label\"].strip())\n", |
|
"doc_to_choice": "{{options}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{context}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_abstract_algebra": { |
|
"task": "mmlu_abstract_algebra", |
|
"task_alias": "abstract_algebra", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "abstract_algebra", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_anatomy": { |
|
"task": "mmlu_anatomy", |
|
"task_alias": "anatomy", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "anatomy", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about anatomy.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_astronomy": { |
|
"task": "mmlu_astronomy", |
|
"task_alias": "astronomy", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "astronomy", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_business_ethics": { |
|
"task": "mmlu_business_ethics", |
|
"task_alias": "business_ethics", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "business_ethics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about business ethics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_clinical_knowledge": { |
|
"task": "mmlu_clinical_knowledge", |
|
"task_alias": "clinical_knowledge", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "clinical_knowledge", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_college_biology": { |
|
"task": "mmlu_college_biology", |
|
"task_alias": "college_biology", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "college_biology", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about college biology.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_college_chemistry": { |
|
"task": "mmlu_college_chemistry", |
|
"task_alias": "college_chemistry", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "college_chemistry", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about college chemistry.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_college_computer_science": { |
|
"task": "mmlu_college_computer_science", |
|
"task_alias": "college_computer_science", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "college_computer_science", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about college computer science.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_college_mathematics": { |
|
"task": "mmlu_college_mathematics", |
|
"task_alias": "college_mathematics", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "college_mathematics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about college mathematics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_college_medicine": { |
|
"task": "mmlu_college_medicine", |
|
"task_alias": "college_medicine", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "college_medicine", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about college medicine.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_college_physics": { |
|
"task": "mmlu_college_physics", |
|
"task_alias": "college_physics", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "college_physics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about college physics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_computer_security": { |
|
"task": "mmlu_computer_security", |
|
"task_alias": "computer_security", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "computer_security", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about computer security.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_conceptual_physics": { |
|
"task": "mmlu_conceptual_physics", |
|
"task_alias": "conceptual_physics", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "conceptual_physics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_econometrics": { |
|
"task": "mmlu_econometrics", |
|
"task_alias": "econometrics", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "econometrics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about econometrics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_electrical_engineering": { |
|
"task": "mmlu_electrical_engineering", |
|
"task_alias": "electrical_engineering", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "electrical_engineering", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_elementary_mathematics": { |
|
"task": "mmlu_elementary_mathematics", |
|
"task_alias": "elementary_mathematics", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "elementary_mathematics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_formal_logic": { |
|
"task": "mmlu_formal_logic", |
|
"task_alias": "formal_logic", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "formal_logic", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about formal logic.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_global_facts": { |
|
"task": "mmlu_global_facts", |
|
"task_alias": "global_facts", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "global_facts", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about global facts.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_biology": { |
|
"task": "mmlu_high_school_biology", |
|
"task_alias": "high_school_biology", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_biology", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school biology.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_chemistry": { |
|
"task": "mmlu_high_school_chemistry", |
|
"task_alias": "high_school_chemistry", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_chemistry", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_computer_science": { |
|
"task": "mmlu_high_school_computer_science", |
|
"task_alias": "high_school_computer_science", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_computer_science", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school computer science.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_european_history": { |
|
"task": "mmlu_high_school_european_history", |
|
"task_alias": "high_school_european_history", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_european_history", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school european history.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_geography": { |
|
"task": "mmlu_high_school_geography", |
|
"task_alias": "high_school_geography", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_geography", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school geography.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_government_and_politics": { |
|
"task": "mmlu_high_school_government_and_politics", |
|
"task_alias": "high_school_government_and_politics", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_government_and_politics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_macroeconomics": { |
|
"task": "mmlu_high_school_macroeconomics", |
|
"task_alias": "high_school_macroeconomics", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_macroeconomics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_mathematics": { |
|
"task": "mmlu_high_school_mathematics", |
|
"task_alias": "high_school_mathematics", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_mathematics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_microeconomics": { |
|
"task": "mmlu_high_school_microeconomics", |
|
"task_alias": "high_school_microeconomics", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_microeconomics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_physics": { |
|
"task": "mmlu_high_school_physics", |
|
"task_alias": "high_school_physics", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_physics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school physics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_psychology": { |
|
"task": "mmlu_high_school_psychology", |
|
"task_alias": "high_school_psychology", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_psychology", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school psychology.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_statistics": { |
|
"task": "mmlu_high_school_statistics", |
|
"task_alias": "high_school_statistics", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_statistics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school statistics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_us_history": { |
|
"task": "mmlu_high_school_us_history", |
|
"task_alias": "high_school_us_history", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_us_history", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school us history.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_high_school_world_history": { |
|
"task": "mmlu_high_school_world_history", |
|
"task_alias": "high_school_world_history", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "high_school_world_history", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about high school world history.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_human_aging": { |
|
"task": "mmlu_human_aging", |
|
"task_alias": "human_aging", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "human_aging", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about human aging.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_human_sexuality": { |
|
"task": "mmlu_human_sexuality", |
|
"task_alias": "human_sexuality", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "human_sexuality", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about human sexuality.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_international_law": { |
|
"task": "mmlu_international_law", |
|
"task_alias": "international_law", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "international_law", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about international law.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_jurisprudence": { |
|
"task": "mmlu_jurisprudence", |
|
"task_alias": "jurisprudence", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "jurisprudence", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_logical_fallacies": { |
|
"task": "mmlu_logical_fallacies", |
|
"task_alias": "logical_fallacies", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "logical_fallacies", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_machine_learning": { |
|
"task": "mmlu_machine_learning", |
|
"task_alias": "machine_learning", |
|
"tag": "mmlu_stem_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "machine_learning", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about machine learning.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_management": { |
|
"task": "mmlu_management", |
|
"task_alias": "management", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "management", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about management.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_marketing": { |
|
"task": "mmlu_marketing", |
|
"task_alias": "marketing", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "marketing", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about marketing.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_medical_genetics": { |
|
"task": "mmlu_medical_genetics", |
|
"task_alias": "medical_genetics", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "medical_genetics", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about medical genetics.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_miscellaneous": { |
|
"task": "mmlu_miscellaneous", |
|
"task_alias": "miscellaneous", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "miscellaneous", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_moral_disputes": { |
|
"task": "mmlu_moral_disputes", |
|
"task_alias": "moral_disputes", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "moral_disputes", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about moral disputes.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_moral_scenarios": { |
|
"task": "mmlu_moral_scenarios", |
|
"task_alias": "moral_scenarios", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "moral_scenarios", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_nutrition": { |
|
"task": "mmlu_nutrition", |
|
"task_alias": "nutrition", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "nutrition", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about nutrition.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_philosophy": { |
|
"task": "mmlu_philosophy", |
|
"task_alias": "philosophy", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "philosophy", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about philosophy.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_prehistory": { |
|
"task": "mmlu_prehistory", |
|
"task_alias": "prehistory", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "prehistory", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about prehistory.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_professional_accounting": { |
|
"task": "mmlu_professional_accounting", |
|
"task_alias": "professional_accounting", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "professional_accounting", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about professional accounting.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_professional_law": { |
|
"task": "mmlu_professional_law", |
|
"task_alias": "professional_law", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "professional_law", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about professional law.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_professional_medicine": { |
|
"task": "mmlu_professional_medicine", |
|
"task_alias": "professional_medicine", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "professional_medicine", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about professional medicine.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_professional_psychology": { |
|
"task": "mmlu_professional_psychology", |
|
"task_alias": "professional_psychology", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "professional_psychology", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about professional psychology.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_public_relations": { |
|
"task": "mmlu_public_relations", |
|
"task_alias": "public_relations", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "public_relations", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about public relations.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_security_studies": { |
|
"task": "mmlu_security_studies", |
|
"task_alias": "security_studies", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "security_studies", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about security studies.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_sociology": { |
|
"task": "mmlu_sociology", |
|
"task_alias": "sociology", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "sociology", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about sociology.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_us_foreign_policy": { |
|
"task": "mmlu_us_foreign_policy", |
|
"task_alias": "us_foreign_policy", |
|
"tag": "mmlu_social_sciences_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "us_foreign_policy", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_virology": { |
|
"task": "mmlu_virology", |
|
"task_alias": "virology", |
|
"tag": "mmlu_other_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "virology", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about virology.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"mmlu_world_religions": { |
|
"task": "mmlu_world_religions", |
|
"task_alias": "world_religions", |
|
"tag": "mmlu_humanities_tasks", |
|
"dataset_path": "hails/mmlu_no_train", |
|
"dataset_name": "world_religions", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"test_split": "test", |
|
"fewshot_split": "dev", |
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", |
|
"doc_to_target": "answer", |
|
"doc_to_choice": [ |
|
"A", |
|
"B", |
|
"C", |
|
"D" |
|
], |
|
"description": "The following are multiple choice questions (with answers) about world religions.\n\n", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"fewshot_config": { |
|
"sampler": "first_n" |
|
}, |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
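Every MMLU subtask entry above shares the same Jinja doc_to_text template, description prefix, and A–D choice list; only the dataset_name and the description topic change. As a rough, non-authoritative illustration of how one prompt is assembled from that config, the sketch below renders the template for an invented high-school-statistics style document (the question, choices, and answer index are made up; jinja2 is assumed to be available, since the harness uses Jinja for these templates).

```python
# Sketch: how an MMLU prompt is assembled from the config above.
# The document below is invented purely for illustration.
from jinja2 import Template

doc_to_text = (
    "{{question.strip()}}"
    "\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:"
)
description = (
    "The following are multiple choice questions (with answers) "
    "about high school statistics.\n\n"
)

doc = {  # hypothetical example document
    "question": "Which measure of spread is least affected by outliers?",
    "choices": ["Range", "Variance", "Interquartile range", "Standard deviation"],
    "answer": 2,  # index into doc_to_choice, i.e. option "C"
}

prompt = description + Template(doc_to_text).render(**doc)
target = " " + ["A", "B", "C", "D"][doc["answer"]]  # target_delimiter + doc_to_choice[answer]
print(prompt + target)
```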
|
"piqa": { |
|
"task": "piqa", |
|
"dataset_path": "piqa", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"doc_to_text": "Question: {{goal}}\nAnswer:", |
|
"doc_to_target": "label", |
|
"doc_to_choice": "{{[sol1, sol2]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "goal", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
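The piqa entry scores two continuations, `{{[sol1, sol2]}}`, and reports both acc and acc_norm. As a minimal sketch, assuming the harness's usual convention that acc_norm length-normalises each choice's log-likelihood by the byte length of the choice text before taking the argmax, the example below shows how the two metrics can disagree on a single invented item (the document and log-likelihood values are made up).

```python
# Sketch: acc vs. acc_norm for one hypothetical piqa item, assuming
# acc_norm divides each choice's log-likelihood by its byte length.
import numpy as np

doc = {  # invented example in the goal/sol1/sol2 shape used above
    "goal": "keep a cutting board from sliding",
    "sol1": "place a damp towel under the board before chopping.",
    "sol2": "hold the board down with one hand while chopping with the other hand as fast as possible.",
    "label": 0,
}
choices = [doc["sol1"], doc["sol2"]]          # doc_to_choice: "{{[sol1, sol2]}}"
loglikelihoods = np.array([-31.0, -38.0])     # made-up per-choice log-likelihoods

acc = int(np.argmax(loglikelihoods) == doc["label"])
byte_lens = np.array([len(c.encode("utf-8")) for c in choices])
acc_norm = int(np.argmax(loglikelihoods / byte_lens) == doc["label"])
print(acc, acc_norm)  # here the raw argmax picks sol1, the normalised one picks sol2
```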
|
"sciq": { |
|
"task": "sciq", |
|
"dataset_path": "sciq", |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"test_split": "test", |
|
"doc_to_text": "{{support.lstrip()}}\nQuestion: {{question}}\nAnswer:", |
|
"doc_to_target": 3, |
|
"doc_to_choice": "{{[distractor1, distractor2, distractor3, correct_answer]}}", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
}, |
|
{ |
|
"metric": "acc_norm", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{support}} {{question}}", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
|
"wikitext": { |
|
"task": "wikitext", |
|
"dataset_path": "EleutherAI/wikitext_document_level", |
|
"dataset_name": "wikitext-2-raw-v1", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"test_split": "test", |
|
"doc_to_text": "", |
|
"doc_to_target": "def wikitext_detokenizer(doc):\n string = doc[\"page\"]\n # contractions\n string = string.replace(\"s '\", \"s'\")\n string = re.sub(r\"/' [0-9]/\", r\"/'[0-9]/\", string)\n # number separators\n string = string.replace(\" @-@ \", \"-\")\n string = string.replace(\" @,@ \", \",\")\n string = string.replace(\" @.@ \", \".\")\n # punctuation\n string = string.replace(\" : \", \": \")\n string = string.replace(\" ; \", \"; \")\n string = string.replace(\" . \", \". \")\n string = string.replace(\" ! \", \"! \")\n string = string.replace(\" ? \", \"? \")\n string = string.replace(\" , \", \", \")\n # double brackets\n string = re.sub(r\"\\(\\s*([^\\)]*?)\\s*\\)\", r\"(\\1)\", string)\n string = re.sub(r\"\\[\\s*([^\\]]*?)\\s*\\]\", r\"[\\1]\", string)\n string = re.sub(r\"{\\s*([^}]*?)\\s*}\", r\"{\\1}\", string)\n string = re.sub(r\"\\\"\\s*([^\\\"]*?)\\s*\\\"\", r'\"\\1\"', string)\n string = re.sub(r\"'\\s*([^']*?)\\s*'\", r\"'\\1'\", string)\n # miscellaneous\n string = string.replace(\"= = = =\", \"====\")\n string = string.replace(\"= = =\", \"===\")\n string = string.replace(\"= =\", \"==\")\n string = string.replace(\" \" + chr(176) + \" \", chr(176))\n string = string.replace(\" \\n\", \"\\n\")\n string = string.replace(\"\\n \", \"\\n\")\n string = string.replace(\" N \", \" 1 \")\n string = string.replace(\" 's\", \"'s\")\n\n return string\n", |
|
"process_results": "def process_results(doc, results):\n (loglikelihood,) = results\n # IMPORTANT: wikitext counts number of words in *original doc before detokenization*\n _words = len(re.split(r\"\\s+\", doc[\"page\"]))\n _bytes = len(doc[\"page\"].encode(\"utf-8\"))\n return {\n \"word_perplexity\": (loglikelihood, _words),\n \"byte_perplexity\": (loglikelihood, _bytes),\n \"bits_per_byte\": (loglikelihood, _bytes),\n }\n", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "word_perplexity" |
|
}, |
|
{ |
|
"metric": "byte_perplexity" |
|
}, |
|
{ |
|
"metric": "bits_per_byte" |
|
} |
|
], |
|
"output_type": "loglikelihood_rolling", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "{{page}}", |
|
"metadata": { |
|
"version": 2.0 |
|
} |
|
}, |
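The wikitext entry uses loglikelihood_rolling over whole documents, and the process_results function above returns (loglikelihood, word_count) and (loglikelihood, byte_count) pairs per document. Assuming the harness's usual weighted aggregation for these pairs, the sketch below shows how the three reported metrics would be computed at the corpus level (the per-document numbers are invented).

```python
# Sketch: aggregating the (loglikelihood, count) pairs emitted by the
# process_results function above into corpus-level metrics, assuming the
# harness's usual weighted-perplexity aggregation. The numbers are invented.
import math

# Hypothetical per-document results: (total log-likelihood, n_words, n_bytes)
docs = [(-2300.0, 1000, 5600), (-1150.0, 480, 2700), (-2050.0, 910, 5100)]

total_ll = sum(ll for ll, _, _ in docs)
total_words = sum(w for _, w, _ in docs)
total_bytes = sum(b for _, _, b in docs)

word_perplexity = math.exp(-total_ll / total_words)
byte_perplexity = math.exp(-total_ll / total_bytes)
bits_per_byte = -total_ll / (total_bytes * math.log(2))

print(f"word_ppl={word_perplexity:.2f}  byte_ppl={byte_perplexity:.3f}  bpb={bits_per_byte:.3f}")
```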
|
"winogrande": { |
|
"task": "winogrande", |
|
"dataset_path": "winogrande", |
|
"dataset_name": "winogrande_xl", |
|
"dataset_kwargs": { |
|
"trust_remote_code": true |
|
}, |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"doc_to_text": "def doc_to_text(doc):\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n", |
|
"doc_to_target": "def doc_to_target(doc):\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n", |
|
"doc_to_choice": "def doc_to_choice(doc):\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n", |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc", |
|
"aggregation": "mean", |
|
"higher_is_better": true |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": true, |
|
"doc_to_decontamination_query": "sentence", |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
}, |
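The winogrande entry uses Python callables instead of Jinja templates: doc_to_choice builds one full sentence prefix per option up to the `_` blank, doc_to_target is the continuation shared by both options, and doc_to_text returns the index of the gold option. The sketch below simply applies those three functions, copied from the config above, to an invented document to show what actually gets scored.

```python
# Sketch: applying the winogrande callables from the config above to an
# invented document, to show what the scored strings look like.

def doc_to_text(doc):
    answer_to_num = {"1": 0, "2": 1}
    return answer_to_num[doc["answer"]]          # index of the gold option

def doc_to_target(doc):
    idx = doc["sentence"].index("_") + 1
    return doc["sentence"][idx:].strip()         # continuation shared by both choices

def doc_to_choice(doc):
    idx = doc["sentence"].index("_")
    options = [doc["option1"], doc["option2"]]
    return [doc["sentence"][:idx] + opt for opt in options]

doc = {  # hypothetical example
    "sentence": "The trophy does not fit in the suitcase because the _ is too small.",
    "option1": "trophy",
    "option2": "suitcase",
    "answer": "2",
}

print(doc_to_choice(doc))   # two full contexts, one per option
print(doc_to_target(doc))   # "is too small."
print(doc_to_text(doc))     # 1  (gold index)
```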
|
"wsc": { |
|
"task": "wsc", |
|
"tag": [ |
|
"super-glue-lm-eval-v1" |
|
], |
|
"dataset_path": "super_glue", |
|
"dataset_name": "wsc.fixed", |
|
"training_split": "train", |
|
"validation_split": "validation", |
|
"doc_to_text": "def default_doc_to_text(x):\n raw_passage = x[\"text\"]\n # NOTE: HuggingFace span indices are word-based not character-based.\n pre = \" \".join(raw_passage.split()[: x[\"span2_index\"]])\n post = raw_passage[len(pre) + len(x[\"span2_text\"]) + 1 :]\n passage = general_detokenize(pre + \" *{}*\".format(x[\"span2_text\"]) + post)\n noun = x[\"span1_text\"]\n pronoun = x[\"span2_text\"]\n text = (\n f\"Passage: {passage}\\n\"\n + f'Question: In the passage above, does the pronoun \"*{pronoun}*\" refer to \"*{noun}*\"?\\n'\n + \"Answer:\"\n )\n return text\n", |
|
"doc_to_target": "label", |
|
"doc_to_choice": [ |
|
"no", |
|
"yes" |
|
], |
|
"description": "", |
|
"target_delimiter": " ", |
|
"fewshot_delimiter": "\n\n", |
|
"num_fewshot": 0, |
|
"metric_list": [ |
|
{ |
|
"metric": "acc" |
|
} |
|
], |
|
"output_type": "multiple_choice", |
|
"repeats": 1, |
|
"should_decontaminate": false, |
|
"metadata": { |
|
"version": 1.0 |
|
} |
|
} |
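The wsc entry's default_doc_to_text wraps the target pronoun in asterisks inside the passage and asks a yes/no question, which pairs with the ["no", "yes"] choice list and the integer label. The sketch below reproduces that function for an invented document; general_detokenize is replaced by an identity stand-in so the sketch stays self-contained (the real helper cleans up spacing around punctuation).

```python
# Sketch: what the wsc prompt built by default_doc_to_text above looks like
# for an invented example. general_detokenize is a placeholder here.

def general_detokenize(s: str) -> str:
    return s  # stand-in; the harness's helper fixes spacing around punctuation

def default_doc_to_text(x):
    raw_passage = x["text"]
    # NOTE: HuggingFace span indices are word-based not character-based.
    pre = " ".join(raw_passage.split()[: x["span2_index"]])
    post = raw_passage[len(pre) + len(x["span2_text"]) + 1 :]
    passage = general_detokenize(pre + " *{}*".format(x["span2_text"]) + post)
    noun = x["span1_text"]
    pronoun = x["span2_text"]
    return (
        f"Passage: {passage}\n"
        f'Question: In the passage above, does the pronoun "*{pronoun}*" refer to "*{noun}*"?\n'
        "Answer:"
    )

doc = {  # hypothetical example in the wsc.fixed field layout
    "text": "Mark gave Pete the book because he had finished it.",
    "span1_index": 0,
    "span1_text": "Mark",
    "span2_index": 6,
    "span2_text": "he",
    "label": 1,
}
print(default_doc_to_text(doc))
# The model then scores the choices ["no", "yes"]; label 1 means "yes".
```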
|
}, |
|
"versions": { |
|
"arc_challenge": 1.0, |
|
"arc_easy": 1.0, |
|
"blimp": 2.0, |
|
"blimp_adjunct_island": 1.0, |
|
"blimp_anaphor_gender_agreement": 1.0, |
|
"blimp_anaphor_number_agreement": 1.0, |
|
"blimp_animate_subject_passive": 1.0, |
|
"blimp_animate_subject_trans": 1.0, |
|
"blimp_causative": 1.0, |
|
"blimp_complex_NP_island": 1.0, |
|
"blimp_coordinate_structure_constraint_complex_left_branch": 1.0, |
|
"blimp_coordinate_structure_constraint_object_extraction": 1.0, |
|
"blimp_determiner_noun_agreement_1": 1.0, |
|
"blimp_determiner_noun_agreement_2": 1.0, |
|
"blimp_determiner_noun_agreement_irregular_1": 1.0, |
|
"blimp_determiner_noun_agreement_irregular_2": 1.0, |
|
"blimp_determiner_noun_agreement_with_adj_2": 1.0, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0, |
|
"blimp_determiner_noun_agreement_with_adjective_1": 1.0, |
|
"blimp_distractor_agreement_relational_noun": 1.0, |
|
"blimp_distractor_agreement_relative_clause": 1.0, |
|
"blimp_drop_argument": 1.0, |
|
"blimp_ellipsis_n_bar_1": 1.0, |
|
"blimp_ellipsis_n_bar_2": 1.0, |
|
"blimp_existential_there_object_raising": 1.0, |
|
"blimp_existential_there_quantifiers_1": 1.0, |
|
"blimp_existential_there_quantifiers_2": 1.0, |
|
"blimp_existential_there_subject_raising": 1.0, |
|
"blimp_expletive_it_object_raising": 1.0, |
|
"blimp_inchoative": 1.0, |
|
"blimp_intransitive": 1.0, |
|
"blimp_irregular_past_participle_adjectives": 1.0, |
|
"blimp_irregular_past_participle_verbs": 1.0, |
|
"blimp_irregular_plural_subject_verb_agreement_1": 1.0, |
|
"blimp_irregular_plural_subject_verb_agreement_2": 1.0, |
|
"blimp_left_branch_island_echo_question": 1.0, |
|
"blimp_left_branch_island_simple_question": 1.0, |
|
"blimp_matrix_question_npi_licensor_present": 1.0, |
|
"blimp_npi_present_1": 1.0, |
|
"blimp_npi_present_2": 1.0, |
|
"blimp_only_npi_licensor_present": 1.0, |
|
"blimp_only_npi_scope": 1.0, |
|
"blimp_passive_1": 1.0, |
|
"blimp_passive_2": 1.0, |
|
"blimp_principle_A_c_command": 1.0, |
|
"blimp_principle_A_case_1": 1.0, |
|
"blimp_principle_A_case_2": 1.0, |
|
"blimp_principle_A_domain_1": 1.0, |
|
"blimp_principle_A_domain_2": 1.0, |
|
"blimp_principle_A_domain_3": 1.0, |
|
"blimp_principle_A_reconstruction": 1.0, |
|
"blimp_regular_plural_subject_verb_agreement_1": 1.0, |
|
"blimp_regular_plural_subject_verb_agreement_2": 1.0, |
|
"blimp_sentential_negation_npi_licensor_present": 1.0, |
|
"blimp_sentential_negation_npi_scope": 1.0, |
|
"blimp_sentential_subject_island": 1.0, |
|
"blimp_superlative_quantifiers_1": 1.0, |
|
"blimp_superlative_quantifiers_2": 1.0, |
|
"blimp_tough_vs_raising_1": 1.0, |
|
"blimp_tough_vs_raising_2": 1.0, |
|
"blimp_transitive": 1.0, |
|
"blimp_wh_island": 1.0, |
|
"blimp_wh_questions_object_gap": 1.0, |
|
"blimp_wh_questions_subject_gap": 1.0, |
|
"blimp_wh_questions_subject_gap_long_distance": 1.0, |
|
"blimp_wh_vs_that_no_gap": 1.0, |
|
"blimp_wh_vs_that_no_gap_long_distance": 1.0, |
|
"blimp_wh_vs_that_with_gap": 1.0, |
|
"blimp_wh_vs_that_with_gap_long_distance": 1.0, |
|
"lambada_openai": 1.0, |
|
"logiqa": 1.0, |
|
"mmlu": 2, |
|
"mmlu_abstract_algebra": 1.0, |
|
"mmlu_anatomy": 1.0, |
|
"mmlu_astronomy": 1.0, |
|
"mmlu_business_ethics": 1.0, |
|
"mmlu_clinical_knowledge": 1.0, |
|
"mmlu_college_biology": 1.0, |
|
"mmlu_college_chemistry": 1.0, |
|
"mmlu_college_computer_science": 1.0, |
|
"mmlu_college_mathematics": 1.0, |
|
"mmlu_college_medicine": 1.0, |
|
"mmlu_college_physics": 1.0, |
|
"mmlu_computer_security": 1.0, |
|
"mmlu_conceptual_physics": 1.0, |
|
"mmlu_econometrics": 1.0, |
|
"mmlu_electrical_engineering": 1.0, |
|
"mmlu_elementary_mathematics": 1.0, |
|
"mmlu_formal_logic": 1.0, |
|
"mmlu_global_facts": 1.0, |
|
"mmlu_high_school_biology": 1.0, |
|
"mmlu_high_school_chemistry": 1.0, |
|
"mmlu_high_school_computer_science": 1.0, |
|
"mmlu_high_school_european_history": 1.0, |
|
"mmlu_high_school_geography": 1.0, |
|
"mmlu_high_school_government_and_politics": 1.0, |
|
"mmlu_high_school_macroeconomics": 1.0, |
|
"mmlu_high_school_mathematics": 1.0, |
|
"mmlu_high_school_microeconomics": 1.0, |
|
"mmlu_high_school_physics": 1.0, |
|
"mmlu_high_school_psychology": 1.0, |
|
"mmlu_high_school_statistics": 1.0, |
|
"mmlu_high_school_us_history": 1.0, |
|
"mmlu_high_school_world_history": 1.0, |
|
"mmlu_human_aging": 1.0, |
|
"mmlu_human_sexuality": 1.0, |
|
"mmlu_humanities": 2, |
|
"mmlu_international_law": 1.0, |
|
"mmlu_jurisprudence": 1.0, |
|
"mmlu_logical_fallacies": 1.0, |
|
"mmlu_machine_learning": 1.0, |
|
"mmlu_management": 1.0, |
|
"mmlu_marketing": 1.0, |
|
"mmlu_medical_genetics": 1.0, |
|
"mmlu_miscellaneous": 1.0, |
|
"mmlu_moral_disputes": 1.0, |
|
"mmlu_moral_scenarios": 1.0, |
|
"mmlu_nutrition": 1.0, |
|
"mmlu_other": 2, |
|
"mmlu_philosophy": 1.0, |
|
"mmlu_prehistory": 1.0, |
|
"mmlu_professional_accounting": 1.0, |
|
"mmlu_professional_law": 1.0, |
|
"mmlu_professional_medicine": 1.0, |
|
"mmlu_professional_psychology": 1.0, |
|
"mmlu_public_relations": 1.0, |
|
"mmlu_security_studies": 1.0, |
|
"mmlu_social_sciences": 2, |
|
"mmlu_sociology": 1.0, |
|
"mmlu_stem": 2, |
|
"mmlu_us_foreign_policy": 1.0, |
|
"mmlu_virology": 1.0, |
|
"mmlu_world_religions": 1.0, |
|
"piqa": 1.0, |
|
"sciq": 1.0, |
|
"wikitext": 2.0, |
|
"winogrande": 1.0, |
|
"wsc": 1.0 |
|
}, |
|
"n-shot": { |
|
"arc_challenge": 0, |
|
"arc_easy": 0, |
|
"blimp_adjunct_island": 0, |
|
"blimp_anaphor_gender_agreement": 0, |
|
"blimp_anaphor_number_agreement": 0, |
|
"blimp_animate_subject_passive": 0, |
|
"blimp_animate_subject_trans": 0, |
|
"blimp_causative": 0, |
|
"blimp_complex_NP_island": 0, |
|
"blimp_coordinate_structure_constraint_complex_left_branch": 0, |
|
"blimp_coordinate_structure_constraint_object_extraction": 0, |
|
"blimp_determiner_noun_agreement_1": 0, |
|
"blimp_determiner_noun_agreement_2": 0, |
|
"blimp_determiner_noun_agreement_irregular_1": 0, |
|
"blimp_determiner_noun_agreement_irregular_2": 0, |
|
"blimp_determiner_noun_agreement_with_adj_2": 0, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_1": 0, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_2": 0, |
|
"blimp_determiner_noun_agreement_with_adjective_1": 0, |
|
"blimp_distractor_agreement_relational_noun": 0, |
|
"blimp_distractor_agreement_relative_clause": 0, |
|
"blimp_drop_argument": 0, |
|
"blimp_ellipsis_n_bar_1": 0, |
|
"blimp_ellipsis_n_bar_2": 0, |
|
"blimp_existential_there_object_raising": 0, |
|
"blimp_existential_there_quantifiers_1": 0, |
|
"blimp_existential_there_quantifiers_2": 0, |
|
"blimp_existential_there_subject_raising": 0, |
|
"blimp_expletive_it_object_raising": 0, |
|
"blimp_inchoative": 0, |
|
"blimp_intransitive": 0, |
|
"blimp_irregular_past_participle_adjectives": 0, |
|
"blimp_irregular_past_participle_verbs": 0, |
|
"blimp_irregular_plural_subject_verb_agreement_1": 0, |
|
"blimp_irregular_plural_subject_verb_agreement_2": 0, |
|
"blimp_left_branch_island_echo_question": 0, |
|
"blimp_left_branch_island_simple_question": 0, |
|
"blimp_matrix_question_npi_licensor_present": 0, |
|
"blimp_npi_present_1": 0, |
|
"blimp_npi_present_2": 0, |
|
"blimp_only_npi_licensor_present": 0, |
|
"blimp_only_npi_scope": 0, |
|
"blimp_passive_1": 0, |
|
"blimp_passive_2": 0, |
|
"blimp_principle_A_c_command": 0, |
|
"blimp_principle_A_case_1": 0, |
|
"blimp_principle_A_case_2": 0, |
|
"blimp_principle_A_domain_1": 0, |
|
"blimp_principle_A_domain_2": 0, |
|
"blimp_principle_A_domain_3": 0, |
|
"blimp_principle_A_reconstruction": 0, |
|
"blimp_regular_plural_subject_verb_agreement_1": 0, |
|
"blimp_regular_plural_subject_verb_agreement_2": 0, |
|
"blimp_sentential_negation_npi_licensor_present": 0, |
|
"blimp_sentential_negation_npi_scope": 0, |
|
"blimp_sentential_subject_island": 0, |
|
"blimp_superlative_quantifiers_1": 0, |
|
"blimp_superlative_quantifiers_2": 0, |
|
"blimp_tough_vs_raising_1": 0, |
|
"blimp_tough_vs_raising_2": 0, |
|
"blimp_transitive": 0, |
|
"blimp_wh_island": 0, |
|
"blimp_wh_questions_object_gap": 0, |
|
"blimp_wh_questions_subject_gap": 0, |
|
"blimp_wh_questions_subject_gap_long_distance": 0, |
|
"blimp_wh_vs_that_no_gap": 0, |
|
"blimp_wh_vs_that_no_gap_long_distance": 0, |
|
"blimp_wh_vs_that_with_gap": 0, |
|
"blimp_wh_vs_that_with_gap_long_distance": 0, |
|
"lambada_openai": 0, |
|
"logiqa": 0, |
|
"mmlu_abstract_algebra": 0, |
|
"mmlu_anatomy": 0, |
|
"mmlu_astronomy": 0, |
|
"mmlu_business_ethics": 0, |
|
"mmlu_clinical_knowledge": 0, |
|
"mmlu_college_biology": 0, |
|
"mmlu_college_chemistry": 0, |
|
"mmlu_college_computer_science": 0, |
|
"mmlu_college_mathematics": 0, |
|
"mmlu_college_medicine": 0, |
|
"mmlu_college_physics": 0, |
|
"mmlu_computer_security": 0, |
|
"mmlu_conceptual_physics": 0, |
|
"mmlu_econometrics": 0, |
|
"mmlu_electrical_engineering": 0, |
|
"mmlu_elementary_mathematics": 0, |
|
"mmlu_formal_logic": 0, |
|
"mmlu_global_facts": 0, |
|
"mmlu_high_school_biology": 0, |
|
"mmlu_high_school_chemistry": 0, |
|
"mmlu_high_school_computer_science": 0, |
|
"mmlu_high_school_european_history": 0, |
|
"mmlu_high_school_geography": 0, |
|
"mmlu_high_school_government_and_politics": 0, |
|
"mmlu_high_school_macroeconomics": 0, |
|
"mmlu_high_school_mathematics": 0, |
|
"mmlu_high_school_microeconomics": 0, |
|
"mmlu_high_school_physics": 0, |
|
"mmlu_high_school_psychology": 0, |
|
"mmlu_high_school_statistics": 0, |
|
"mmlu_high_school_us_history": 0, |
|
"mmlu_high_school_world_history": 0, |
|
"mmlu_human_aging": 0, |
|
"mmlu_human_sexuality": 0, |
|
"mmlu_international_law": 0, |
|
"mmlu_jurisprudence": 0, |
|
"mmlu_logical_fallacies": 0, |
|
"mmlu_machine_learning": 0, |
|
"mmlu_management": 0, |
|
"mmlu_marketing": 0, |
|
"mmlu_medical_genetics": 0, |
|
"mmlu_miscellaneous": 0, |
|
"mmlu_moral_disputes": 0, |
|
"mmlu_moral_scenarios": 0, |
|
"mmlu_nutrition": 0, |
|
"mmlu_philosophy": 0, |
|
"mmlu_prehistory": 0, |
|
"mmlu_professional_accounting": 0, |
|
"mmlu_professional_law": 0, |
|
"mmlu_professional_medicine": 0, |
|
"mmlu_professional_psychology": 0, |
|
"mmlu_public_relations": 0, |
|
"mmlu_security_studies": 0, |
|
"mmlu_sociology": 0, |
|
"mmlu_us_foreign_policy": 0, |
|
"mmlu_virology": 0, |
|
"mmlu_world_religions": 0, |
|
"piqa": 0, |
|
"sciq": 0, |
|
"wikitext": 0, |
|
"winogrande": 0, |
|
"wsc": 0 |
|
}, |
|
"higher_is_better": { |
|
"arc_challenge": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"arc_easy": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"blimp": { |
|
"acc": true |
|
}, |
|
"blimp_adjunct_island": { |
|
"acc": true |
|
}, |
|
"blimp_anaphor_gender_agreement": { |
|
"acc": true |
|
}, |
|
"blimp_anaphor_number_agreement": { |
|
"acc": true |
|
}, |
|
"blimp_animate_subject_passive": { |
|
"acc": true |
|
}, |
|
"blimp_animate_subject_trans": { |
|
"acc": true |
|
}, |
|
"blimp_causative": { |
|
"acc": true |
|
}, |
|
"blimp_complex_NP_island": { |
|
"acc": true |
|
}, |
|
"blimp_coordinate_structure_constraint_complex_left_branch": { |
|
"acc": true |
|
}, |
|
"blimp_coordinate_structure_constraint_object_extraction": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_1": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_2": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_1": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_2": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_2": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_1": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_2": { |
|
"acc": true |
|
}, |
|
"blimp_determiner_noun_agreement_with_adjective_1": { |
|
"acc": true |
|
}, |
|
"blimp_distractor_agreement_relational_noun": { |
|
"acc": true |
|
}, |
|
"blimp_distractor_agreement_relative_clause": { |
|
"acc": true |
|
}, |
|
"blimp_drop_argument": { |
|
"acc": true |
|
}, |
|
"blimp_ellipsis_n_bar_1": { |
|
"acc": true |
|
}, |
|
"blimp_ellipsis_n_bar_2": { |
|
"acc": true |
|
}, |
|
"blimp_existential_there_object_raising": { |
|
"acc": true |
|
}, |
|
"blimp_existential_there_quantifiers_1": { |
|
"acc": true |
|
}, |
|
"blimp_existential_there_quantifiers_2": { |
|
"acc": true |
|
}, |
|
"blimp_existential_there_subject_raising": { |
|
"acc": true |
|
}, |
|
"blimp_expletive_it_object_raising": { |
|
"acc": true |
|
}, |
|
"blimp_inchoative": { |
|
"acc": true |
|
}, |
|
"blimp_intransitive": { |
|
"acc": true |
|
}, |
|
"blimp_irregular_past_participle_adjectives": { |
|
"acc": true |
|
}, |
|
"blimp_irregular_past_participle_verbs": { |
|
"acc": true |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_1": { |
|
"acc": true |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_2": { |
|
"acc": true |
|
}, |
|
"blimp_left_branch_island_echo_question": { |
|
"acc": true |
|
}, |
|
"blimp_left_branch_island_simple_question": { |
|
"acc": true |
|
}, |
|
"blimp_matrix_question_npi_licensor_present": { |
|
"acc": true |
|
}, |
|
"blimp_npi_present_1": { |
|
"acc": true |
|
}, |
|
"blimp_npi_present_2": { |
|
"acc": true |
|
}, |
|
"blimp_only_npi_licensor_present": { |
|
"acc": true |
|
}, |
|
"blimp_only_npi_scope": { |
|
"acc": true |
|
}, |
|
"blimp_passive_1": { |
|
"acc": true |
|
}, |
|
"blimp_passive_2": { |
|
"acc": true |
|
}, |
|
"blimp_principle_A_c_command": { |
|
"acc": true |
|
}, |
|
"blimp_principle_A_case_1": { |
|
"acc": true |
|
}, |
|
"blimp_principle_A_case_2": { |
|
"acc": true |
|
}, |
|
"blimp_principle_A_domain_1": { |
|
"acc": true |
|
}, |
|
"blimp_principle_A_domain_2": { |
|
"acc": true |
|
}, |
|
"blimp_principle_A_domain_3": { |
|
"acc": true |
|
}, |
|
"blimp_principle_A_reconstruction": { |
|
"acc": true |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_1": { |
|
"acc": true |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_2": { |
|
"acc": true |
|
}, |
|
"blimp_sentential_negation_npi_licensor_present": { |
|
"acc": true |
|
}, |
|
"blimp_sentential_negation_npi_scope": { |
|
"acc": true |
|
}, |
|
"blimp_sentential_subject_island": { |
|
"acc": true |
|
}, |
|
"blimp_superlative_quantifiers_1": { |
|
"acc": true |
|
}, |
|
"blimp_superlative_quantifiers_2": { |
|
"acc": true |
|
}, |
|
"blimp_tough_vs_raising_1": { |
|
"acc": true |
|
}, |
|
"blimp_tough_vs_raising_2": { |
|
"acc": true |
|
}, |
|
"blimp_transitive": { |
|
"acc": true |
|
}, |
|
"blimp_wh_island": { |
|
"acc": true |
|
}, |
|
"blimp_wh_questions_object_gap": { |
|
"acc": true |
|
}, |
|
"blimp_wh_questions_subject_gap": { |
|
"acc": true |
|
}, |
|
"blimp_wh_questions_subject_gap_long_distance": { |
|
"acc": true |
|
}, |
|
"blimp_wh_vs_that_no_gap": { |
|
"acc": true |
|
}, |
|
"blimp_wh_vs_that_no_gap_long_distance": { |
|
"acc": true |
|
}, |
|
"blimp_wh_vs_that_with_gap": { |
|
"acc": true |
|
}, |
|
"blimp_wh_vs_that_with_gap_long_distance": { |
|
"acc": true |
|
}, |
|
"lambada_openai": { |
|
"perplexity": false, |
|
"acc": true |
|
}, |
|
"logiqa": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"mmlu": { |
|
"acc": true |
|
}, |
|
"mmlu_abstract_algebra": { |
|
"acc": true |
|
}, |
|
"mmlu_anatomy": { |
|
"acc": true |
|
}, |
|
"mmlu_astronomy": { |
|
"acc": true |
|
}, |
|
"mmlu_business_ethics": { |
|
"acc": true |
|
}, |
|
"mmlu_clinical_knowledge": { |
|
"acc": true |
|
}, |
|
"mmlu_college_biology": { |
|
"acc": true |
|
}, |
|
"mmlu_college_chemistry": { |
|
"acc": true |
|
}, |
|
"mmlu_college_computer_science": { |
|
"acc": true |
|
}, |
|
"mmlu_college_mathematics": { |
|
"acc": true |
|
}, |
|
"mmlu_college_medicine": { |
|
"acc": true |
|
}, |
|
"mmlu_college_physics": { |
|
"acc": true |
|
}, |
|
"mmlu_computer_security": { |
|
"acc": true |
|
}, |
|
"mmlu_conceptual_physics": { |
|
"acc": true |
|
}, |
|
"mmlu_econometrics": { |
|
"acc": true |
|
}, |
|
"mmlu_electrical_engineering": { |
|
"acc": true |
|
}, |
|
"mmlu_elementary_mathematics": { |
|
"acc": true |
|
}, |
|
"mmlu_formal_logic": { |
|
"acc": true |
|
}, |
|
"mmlu_global_facts": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_biology": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_chemistry": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_computer_science": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_european_history": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_geography": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_government_and_politics": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_macroeconomics": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_mathematics": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_microeconomics": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_physics": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_psychology": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_statistics": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_us_history": { |
|
"acc": true |
|
}, |
|
"mmlu_high_school_world_history": { |
|
"acc": true |
|
}, |
|
"mmlu_human_aging": { |
|
"acc": true |
|
}, |
|
"mmlu_human_sexuality": { |
|
"acc": true |
|
}, |
|
"mmlu_humanities": { |
|
"acc": true |
|
}, |
|
"mmlu_international_law": { |
|
"acc": true |
|
}, |
|
"mmlu_jurisprudence": { |
|
"acc": true |
|
}, |
|
"mmlu_logical_fallacies": { |
|
"acc": true |
|
}, |
|
"mmlu_machine_learning": { |
|
"acc": true |
|
}, |
|
"mmlu_management": { |
|
"acc": true |
|
}, |
|
"mmlu_marketing": { |
|
"acc": true |
|
}, |
|
"mmlu_medical_genetics": { |
|
"acc": true |
|
}, |
|
"mmlu_miscellaneous": { |
|
"acc": true |
|
}, |
|
"mmlu_moral_disputes": { |
|
"acc": true |
|
}, |
|
"mmlu_moral_scenarios": { |
|
"acc": true |
|
}, |
|
"mmlu_nutrition": { |
|
"acc": true |
|
}, |
|
"mmlu_other": { |
|
"acc": true |
|
}, |
|
"mmlu_philosophy": { |
|
"acc": true |
|
}, |
|
"mmlu_prehistory": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_accounting": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_law": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_medicine": { |
|
"acc": true |
|
}, |
|
"mmlu_professional_psychology": { |
|
"acc": true |
|
}, |
|
"mmlu_public_relations": { |
|
"acc": true |
|
}, |
|
"mmlu_security_studies": { |
|
"acc": true |
|
}, |
|
"mmlu_social_sciences": { |
|
"acc": true |
|
}, |
|
"mmlu_sociology": { |
|
"acc": true |
|
}, |
|
"mmlu_stem": { |
|
"acc": true |
|
}, |
|
"mmlu_us_foreign_policy": { |
|
"acc": true |
|
}, |
|
"mmlu_virology": { |
|
"acc": true |
|
}, |
|
"mmlu_world_religions": { |
|
"acc": true |
|
}, |
|
"piqa": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"sciq": { |
|
"acc": true, |
|
"acc_norm": true |
|
}, |
|
"wikitext": { |
|
"word_perplexity": false, |
|
"byte_perplexity": false, |
|
"bits_per_byte": false |
|
}, |
|
"winogrande": { |
|
"acc": true |
|
}, |
|
"wsc": { |
|
"acc": true |
|
} |
|
}, |
|
"n-samples": { |
|
"wsc": { |
|
"original": 104, |
|
"effective": 104 |
|
}, |
|
"winogrande": { |
|
"original": 1267, |
|
"effective": 1267 |
|
}, |
|
"wikitext": { |
|
"original": 62, |
|
"effective": 62 |
|
}, |
|
"sciq": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"piqa": { |
|
"original": 1838, |
|
"effective": 1838 |
|
}, |
|
"mmlu_elementary_mathematics": { |
|
"original": 378, |
|
"effective": 378 |
|
}, |
|
"mmlu_electrical_engineering": { |
|
"original": 145, |
|
"effective": 145 |
|
}, |
|
"mmlu_high_school_computer_science": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_high_school_physics": { |
|
"original": 151, |
|
"effective": 151 |
|
}, |
|
"mmlu_college_mathematics": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_college_chemistry": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_machine_learning": { |
|
"original": 112, |
|
"effective": 112 |
|
}, |
|
"mmlu_high_school_mathematics": { |
|
"original": 270, |
|
"effective": 270 |
|
}, |
|
"mmlu_computer_security": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_conceptual_physics": { |
|
"original": 235, |
|
"effective": 235 |
|
}, |
|
"mmlu_high_school_statistics": { |
|
"original": 216, |
|
"effective": 216 |
|
}, |
|
"mmlu_high_school_biology": { |
|
"original": 310, |
|
"effective": 310 |
|
}, |
|
"mmlu_astronomy": { |
|
"original": 152, |
|
"effective": 152 |
|
}, |
|
"mmlu_college_computer_science": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_college_biology": { |
|
"original": 144, |
|
"effective": 144 |
|
}, |
|
"mmlu_college_physics": { |
|
"original": 102, |
|
"effective": 102 |
|
}, |
|
"mmlu_anatomy": { |
|
"original": 135, |
|
"effective": 135 |
|
}, |
|
"mmlu_high_school_chemistry": { |
|
"original": 203, |
|
"effective": 203 |
|
}, |
|
"mmlu_abstract_algebra": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_college_medicine": { |
|
"original": 173, |
|
"effective": 173 |
|
}, |
|
"mmlu_medical_genetics": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_business_ethics": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_miscellaneous": { |
|
"original": 783, |
|
"effective": 783 |
|
}, |
|
"mmlu_nutrition": { |
|
"original": 306, |
|
"effective": 306 |
|
}, |
|
"mmlu_clinical_knowledge": { |
|
"original": 265, |
|
"effective": 265 |
|
}, |
|
"mmlu_human_aging": { |
|
"original": 223, |
|
"effective": 223 |
|
}, |
|
"mmlu_professional_accounting": { |
|
"original": 282, |
|
"effective": 282 |
|
}, |
|
"mmlu_marketing": { |
|
"original": 234, |
|
"effective": 234 |
|
}, |
|
"mmlu_global_facts": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_professional_medicine": { |
|
"original": 272, |
|
"effective": 272 |
|
}, |
|
"mmlu_virology": { |
|
"original": 166, |
|
"effective": 166 |
|
}, |
|
"mmlu_management": { |
|
"original": 103, |
|
"effective": 103 |
|
}, |
|
"mmlu_us_foreign_policy": { |
|
"original": 100, |
|
"effective": 100 |
|
}, |
|
"mmlu_sociology": { |
|
"original": 201, |
|
"effective": 201 |
|
}, |
|
"mmlu_econometrics": { |
|
"original": 114, |
|
"effective": 114 |
|
}, |
|
"mmlu_security_studies": { |
|
"original": 245, |
|
"effective": 245 |
|
}, |
|
"mmlu_high_school_geography": { |
|
"original": 198, |
|
"effective": 198 |
|
}, |
|
"mmlu_public_relations": { |
|
"original": 110, |
|
"effective": 110 |
|
}, |
|
"mmlu_high_school_microeconomics": { |
|
"original": 238, |
|
"effective": 238 |
|
}, |
|
"mmlu_professional_psychology": { |
|
"original": 612, |
|
"effective": 612 |
|
}, |
|
"mmlu_high_school_macroeconomics": { |
|
"original": 390, |
|
"effective": 390 |
|
}, |
|
"mmlu_human_sexuality": { |
|
"original": 131, |
|
"effective": 131 |
|
}, |
|
"mmlu_high_school_government_and_politics": { |
|
"original": 193, |
|
"effective": 193 |
|
}, |
|
"mmlu_high_school_psychology": { |
|
"original": 545, |
|
"effective": 545 |
|
}, |
|
"mmlu_moral_disputes": { |
|
"original": 346, |
|
"effective": 346 |
|
}, |
|
"mmlu_high_school_world_history": { |
|
"original": 237, |
|
"effective": 237 |
|
}, |
|
"mmlu_jurisprudence": { |
|
"original": 108, |
|
"effective": 108 |
|
}, |
|
"mmlu_philosophy": { |
|
"original": 311, |
|
"effective": 311 |
|
}, |
|
"mmlu_high_school_us_history": { |
|
"original": 204, |
|
"effective": 204 |
|
}, |
|
"mmlu_professional_law": { |
|
"original": 1534, |
|
"effective": 1534 |
|
}, |
|
"mmlu_logical_fallacies": { |
|
"original": 163, |
|
"effective": 163 |
|
}, |
|
"mmlu_moral_scenarios": { |
|
"original": 895, |
|
"effective": 895 |
|
}, |
|
"mmlu_formal_logic": { |
|
"original": 126, |
|
"effective": 126 |
|
}, |
|
"mmlu_prehistory": { |
|
"original": 324, |
|
"effective": 324 |
|
}, |
|
"mmlu_high_school_european_history": { |
|
"original": 165, |
|
"effective": 165 |
|
}, |
|
"mmlu_world_religions": { |
|
"original": 171, |
|
"effective": 171 |
|
}, |
|
"mmlu_international_law": { |
|
"original": 121, |
|
"effective": 121 |
|
}, |
|
"logiqa": { |
|
"original": 651, |
|
"effective": 651 |
|
}, |
|
"lambada_openai": { |
|
"original": 5153, |
|
"effective": 5153 |
|
}, |
|
"blimp_adjunct_island": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_anaphor_gender_agreement": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_anaphor_number_agreement": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_animate_subject_passive": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_animate_subject_trans": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_causative": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_complex_NP_island": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_coordinate_structure_constraint_complex_left_branch": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_coordinate_structure_constraint_object_extraction": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_irregular_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adj_irregular_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_determiner_noun_agreement_with_adjective_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_distractor_agreement_relational_noun": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_distractor_agreement_relative_clause": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_drop_argument": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_ellipsis_n_bar_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_ellipsis_n_bar_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_existential_there_object_raising": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_existential_there_quantifiers_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_existential_there_quantifiers_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_existential_there_subject_raising": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_expletive_it_object_raising": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_inchoative": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_intransitive": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_irregular_past_participle_adjectives": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_irregular_past_participle_verbs": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_irregular_plural_subject_verb_agreement_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_left_branch_island_echo_question": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_left_branch_island_simple_question": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_matrix_question_npi_licensor_present": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_npi_present_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_npi_present_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_only_npi_licensor_present": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_only_npi_scope": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_passive_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_passive_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_principle_A_c_command": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_principle_A_case_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_principle_A_case_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_principle_A_domain_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_principle_A_domain_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_principle_A_domain_3": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_principle_A_reconstruction": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_regular_plural_subject_verb_agreement_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_sentential_negation_npi_licensor_present": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_sentential_negation_npi_scope": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_sentential_subject_island": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_superlative_quantifiers_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_superlative_quantifiers_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_tough_vs_raising_1": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_tough_vs_raising_2": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_transitive": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_island": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_questions_object_gap": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_questions_subject_gap": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_questions_subject_gap_long_distance": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_vs_that_no_gap": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_vs_that_no_gap_long_distance": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_vs_that_with_gap": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"blimp_wh_vs_that_with_gap_long_distance": { |
|
"original": 1000, |
|
"effective": 1000 |
|
}, |
|
"arc_challenge": { |
|
"original": 1172, |
|
"effective": 1172 |
|
}, |
|
"arc_easy": { |
|
"original": 2376, |
|
"effective": 2376 |
|
} |
|
}, |
|
"config": { |
|
"model": "hf", |
|
"model_args": "pretrained=EleutherAI/pythia-70m,revision=step2,dtype=float,trust_remote_code=True", |
|
"model_num_parameters": 70426624, |
|
"model_dtype": "torch.float32", |
|
"model_revision": "step2", |
|
"model_sha": "4a776a8d96bd035a6c80b61cb50b93b050961238", |
|
"batch_size": "8", |
|
"batch_sizes": [], |
|
"device": "cuda:0", |
|
"use_cache": null, |
|
"limit": null, |
|
"bootstrap_iters": 100000, |
|
"gen_kwargs": null, |
|
"random_seed": 0, |
|
"numpy_seed": 1234, |
|
"torch_seed": 1234, |
|
"fewshot_seed": 1234 |
|
}, |
|
"git_hash": "a5b7c41", |
|
"date": 1729866560.4417374, |
|
"pretty_env_info": "PyTorch version: 2.5.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.3 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: 14.0.0-1ubuntu1.1\nCMake version: version 3.30.5\nLibc version: glibc-2.35\n\nPython version: 3.10.12 (main, Sep 11 2024, 15:47:36) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-6.1.85+-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.2.140\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: NVIDIA A100-SXM4-40GB\nNvidia driver version: 535.104.05\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.6\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.6\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.6\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.6\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.6\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.6\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.6\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 46 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 12\nOn-line CPU(s) list: 0-11\nVendor ID: GenuineIntel\nModel name: Intel(R) Xeon(R) CPU @ 2.20GHz\nCPU family: 6\nModel: 85\nThread(s) per core: 2\nCore(s) per socket: 6\nSocket(s): 1\nStepping: 7\nBogoMIPS: 4400.30\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm mpx avx512f avx512dq rdseed adx smap clflushopt clwb avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves arat avx512_vnni md_clear arch_capabilities\nHypervisor vendor: KVM\nVirtualization type: full\nL1d cache: 192 KiB (6 instances)\nL1i cache: 192 KiB (6 instances)\nL2 cache: 6 MiB (6 instances)\nL3 cache: 38.5 MiB (1 instance)\nNUMA node(s): 1\nNUMA node0 CPU(s): 0-11\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Vulnerable\nVulnerability Reg file data sampling: Not affected\nVulnerability Retbleed: Vulnerable\nVulnerability Spec rstack overflow: Not affected\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Vulnerable: __user pointer sanitization and usercopy barriers only; no swapgs barriers\nVulnerability Spectre v2: Vulnerable; IBPB: disabled; STIBP: disabled; PBRSB-eIBRS: Vulnerable; BHI: Vulnerable (Syscall hardening enabled)\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Vulnerable\n\nVersions of relevant libraries:\n[pip3] mypy==1.13.0\n[pip3] mypy-extensions==1.0.0\n[pip3] numpy==1.26.4\n[pip3] optree==0.13.0\n[pip3] torch==2.5.0+cu121\n[pip3] torchaudio==2.5.0+cu121\n[pip3] torchsummary==1.5.1\n[pip3] torchvision==0.20.0+cu121\n[conda] Could not collect", |
|
"transformers_version": "4.44.2", |
|
"upper_git_hash": null, |
|
"tokenizer_pad_token": [ |
|
"<|endoftext|>", |
|
"0" |
|
], |
|
"tokenizer_eos_token": [ |
|
"<|endoftext|>", |
|
"0" |
|
], |
|
"tokenizer_bos_token": [ |
|
"<|endoftext|>", |
|
"0" |
|
], |
|
"eot_token_id": 0, |
|
"max_length": 2048, |
|
"task_hashes": {}, |
|
"model_source": "hf", |
|
"model_name": "EleutherAI/pythia-70m", |
|
"model_name_sanitized": "EleutherAI__pythia-70m", |
|
"system_instruction": null, |
|
"system_instruction_sha": null, |
|
"fewshot_as_multiturn": false, |
|
"chat_template": null, |
|
"chat_template_sha": null, |
|
"start_time": 1442.043856738, |
|
"end_time": 2043.664356316, |
|
"total_evaluation_time_seconds": "601.6204995780001" |
|
} |
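
Reproduction note (editorial addition, not part of the harness output above): the "config" block records the arguments of the lm-evaluation-harness run that produced these numbers (hf model backend, pretrained=EleutherAI/pythia-70m at revision step2, float32 weights, batch size 8, cuda:0). As a minimal sketch, assuming a recent lm_eval 0.4.x release where simple_evaluate exposes these keyword arguments, an equivalent run could look like the following; the task list and output filename here are illustrative, not taken from the file above.

# Minimal reproduction sketch based on the "config" block above.
# Assumes lm-evaluation-harness is installed (pip install lm_eval) and a CUDA GPU is available;
# keyword names may differ slightly between harness versions.
import json
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",  # matches "model": "hf"
    model_args="pretrained=EleutherAI/pythia-70m,revision=step2,dtype=float,trust_remote_code=True",
    tasks=["arc_challenge", "arc_easy", "blimp", "lambada_openai", "logiqa", "mmlu"],
    device="cuda:0",
    batch_size=8,
)

# Write the returned dictionary in the same JSON shape as this document
# ("results", "n-samples", "config", environment metadata, timing).
with open("pythia-70m_step2_results.json", "w") as f:  # hypothetical output path
    json.dump(results, f, indent=2, default=str)

The same run maps onto the command-line interface as lm_eval --model hf --model_args pretrained=EleutherAI/pythia-70m,revision=step2,dtype=float,trust_remote_code=True --tasks ... --device cuda:0 --batch_size 8; in recent harness versions the recorded seeds (random_seed 0, numpy/torch/fewshot seeds 1234) can likewise be passed via the --seed flag.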