|
{"name": "babi_qa", "hf_repo": "facebook/babi_qa", "hf_subset": "en-valid-qa1", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "babi_qa"} |
|
{"name": "bbq", "hf_repo": "lighteval/bbq_helm", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Age", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Age", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Disability_status", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Disability_status", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Gender_identity", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Gender_identity", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Nationality", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Nationality", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Physical_appearance", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Physical_appearance", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Race_ethnicity", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Race_ethnicity", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Race_x_SES", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Race_x_SES", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Race_x_gender", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Race_x_gender", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Religion", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Religion", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_SES", "hf_repo": "lighteval/bbq_helm", "hf_subset": "SES", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bbq_Sexual_orientation", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Sexual_orientation", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"} |
|
{"name": "bigbench_auto_debugging", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "auto_debugging", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-age_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-age_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-age_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-age_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-disability_status_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-disability_status_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-disability_status_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-disability_status_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-gender_identity_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-gender_identity_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-gender_identity_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-gender_identity_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-nationality_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-nationality_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-nationality_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-nationality_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-physical_appearance_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-physical_appearance_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-physical_appearance_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-physical_appearance_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-race_ethnicity_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-race_ethnicity_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-race_ethnicity_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-race_ethnicity_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-religion_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-religion_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-religion_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-religion_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-ses_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-ses_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-ses_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-ses_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-sexual_orientation_ambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-sexual_orientation_ambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_bbq_lite_json-sexual_orientation_disambig", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "bbq_lite_json-sexual_orientation_disambig", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_code_line_description", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "code_line_description", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conceptual_combinations-contradictions", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conceptual_combinations-contradictions", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conceptual_combinations-emergent_properties", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conceptual_combinations-emergent_properties", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conceptual_combinations-fanciful_fictional_combinations", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conceptual_combinations-fanciful_fictional_combinations", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conceptual_combinations-homonyms", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conceptual_combinations-homonyms", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conceptual_combinations-invented_words", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conceptual_combinations-invented_words", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-adna_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-adna_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-adna_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-adna_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-atikampe_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-atikampe_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-atikampe_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-atikampe_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-gornam_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-gornam_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-gornam_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-gornam_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-holuan_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-holuan_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-holuan_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-holuan_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-mkafala_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-mkafala_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-mkafala_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-mkafala_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-postpositive_english_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-postpositive_english_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-postpositive_english_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-postpositive_english_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-unapuri_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-unapuri_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-unapuri_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-unapuri_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-vaomi_from", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-vaomi_from", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_conlang_translation-vaomi_to", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "conlang_translation-vaomi_to", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["rouge_1", "rouge_2", "rouge_l"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_emoji_movie", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "emoji_movie", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_formal_fallacies_syllogisms_negation", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "formal_fallacies_syllogisms_negation", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_hindu_knowledge", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "hindu_knowledge", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_known_unknowns", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "known_unknowns", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_language_identification", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "language_identification", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_linguistics_puzzles", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "linguistics_puzzles", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_logic_grid_puzzle", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "logic_grid_puzzle", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_logical_deduction-three_objects", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "logical_deduction-three_objects", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_logical_deduction-five_objects", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "logical_deduction-five_objects", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_logical_deduction-seven_objects", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "logical_deduction-seven_objects", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_misconceptions_russian", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "misconceptions_russian", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_novel_concepts", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "novel_concepts", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_operators", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "operators", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_parsinlu_reading_comprehension", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "parsinlu_reading_comprehension", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_play_dialog_same_or_different", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "play_dialog_same_or_different", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_repeat_copy_logic", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "repeat_copy_logic", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_strange_stories-boolean", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "strange_stories-boolean", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_strange_stories-multiple_choice", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "strange_stories-multiple_choice", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_strategyqa", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "strategyqa", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_symbol_interpretation-adversarial", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "symbol_interpretation-adversarial", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_symbol_interpretation-emoji_agnostic", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "symbol_interpretation-emoji_agnostic", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_symbol_interpretation-name_agnostic", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "symbol_interpretation-name_agnostic", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_symbol_interpretation-plain", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "symbol_interpretation-plain", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_symbol_interpretation-tricky", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "symbol_interpretation-tricky", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_vitaminc_fact_verification", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "vitaminc_fact_verification", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "bigbench_winowhy", "hf_repo": "lighteval/bigbench_helm", "hf_subset": "winowhy", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "bigbench_scenario"], "prompt_function": "bigbench"} |
|
{"name": "blimp_determiner_noun_agreement_with_adjective_1", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_with_adjective_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_left_branch_island_simple_question", "hf_repo": "blimp", "hf_subset": "left_branch_island_simple_question", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_matrix_question_npi_licensor_present", "hf_repo": "blimp", "hf_subset": "matrix_question_npi_licensor_present", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_determiner_noun_agreement_irregular_2", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_irregular_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_animate_subject_passive", "hf_repo": "blimp", "hf_subset": "animate_subject_passive", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_irregular_past_participle_adjectives", "hf_repo": "blimp", "hf_subset": "irregular_past_participle_adjectives", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_irregular_past_participle_verbs", "hf_repo": "blimp", "hf_subset": "irregular_past_participle_verbs", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_principle_A_domain_2", "hf_repo": "blimp", "hf_subset": "principle_A_domain_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_island", "hf_repo": "blimp", "hf_subset": "wh_island", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_only_npi_scope", "hf_repo": "blimp", "hf_subset": "only_npi_scope", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_questions_subject_gap", "hf_repo": "blimp", "hf_subset": "wh_questions_subject_gap", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_principle_A_case_2", "hf_repo": "blimp", "hf_subset": "principle_A_case_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_determiner_noun_agreement_with_adj_2", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_with_adj_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_sentential_negation_npi_licensor_present", "hf_repo": "blimp", "hf_subset": "sentential_negation_npi_licensor_present", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_npi_present_2", "hf_repo": "blimp", "hf_subset": "npi_present_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_principle_A_c_command", "hf_repo": "blimp", "hf_subset": "principle_A_c_command", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_ellipsis_n_bar_2", "hf_repo": "blimp", "hf_subset": "ellipsis_n_bar_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_regular_plural_subject_verb_agreement_2", "hf_repo": "blimp", "hf_subset": "regular_plural_subject_verb_agreement_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_passive_1", "hf_repo": "blimp", "hf_subset": "passive_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_expletive_it_object_raising", "hf_repo": "blimp", "hf_subset": "expletive_it_object_raising", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_irregular_plural_subject_verb_agreement_2", "hf_repo": "blimp", "hf_subset": "irregular_plural_subject_verb_agreement_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_tough_vs_raising_1", "hf_repo": "blimp", "hf_subset": "tough_vs_raising_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_superlative_quantifiers_2", "hf_repo": "blimp", "hf_subset": "superlative_quantifiers_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_distractor_agreement_relational_noun", "hf_repo": "blimp", "hf_subset": "distractor_agreement_relational_noun", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_existential_there_quantifiers_1", "hf_repo": "blimp", "hf_subset": "existential_there_quantifiers_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_vs_that_no_gap", "hf_repo": "blimp", "hf_subset": "wh_vs_that_no_gap", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_drop_argument", "hf_repo": "blimp", "hf_subset": "drop_argument", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_only_npi_licensor_present", "hf_repo": "blimp", "hf_subset": "only_npi_licensor_present", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_intransitive", "hf_repo": "blimp", "hf_subset": "intransitive", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_anaphor_gender_agreement", "hf_repo": "blimp", "hf_subset": "anaphor_gender_agreement", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_determiner_noun_agreement_1", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_determiner_noun_agreement_with_adj_irregular_1", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_with_adj_irregular_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_anaphor_number_agreement", "hf_repo": "blimp", "hf_subset": "anaphor_number_agreement", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_principle_A_domain_3", "hf_repo": "blimp", "hf_subset": "principle_A_domain_3", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_principle_A_case_1", "hf_repo": "blimp", "hf_subset": "principle_A_case_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_npi_present_1", "hf_repo": "blimp", "hf_subset": "npi_present_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_ellipsis_n_bar_1", "hf_repo": "blimp", "hf_subset": "ellipsis_n_bar_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp", "hf_repo": "blimp", "hf_subset": null, "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_transitive", "hf_repo": "blimp", "hf_subset": "transitive", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_coordinate_structure_constraint_object_extraction", "hf_repo": "blimp", "hf_subset": "coordinate_structure_constraint_object_extraction", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_determiner_noun_agreement_irregular_1", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_irregular_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_vs_that_with_gap", "hf_repo": "blimp", "hf_subset": "wh_vs_that_with_gap", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_vs_that_with_gap_long_distance", "hf_repo": "blimp", "hf_subset": "wh_vs_that_with_gap_long_distance", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_principle_A_domain_1", "hf_repo": "blimp", "hf_subset": "principle_A_domain_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_principle_A_reconstruction", "hf_repo": "blimp", "hf_subset": "principle_A_reconstruction", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_existential_there_quantifiers_2", "hf_repo": "blimp", "hf_subset": "existential_there_quantifiers_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_sentential_negation_npi_scope", "hf_repo": "blimp", "hf_subset": "sentential_negation_npi_scope", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_animate_subject_trans", "hf_repo": "blimp", "hf_subset": "animate_subject_trans", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_complex_NP_island", "hf_repo": "blimp", "hf_subset": "complex_NP_island", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_inchoative", "hf_repo": "blimp", "hf_subset": "inchoative", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_distractor_agreement_relative_clause", "hf_repo": "blimp", "hf_subset": "distractor_agreement_relative_clause", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_determiner_noun_agreement_2", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_determiner_noun_agreement_with_adj_irregular_2", "hf_repo": "blimp", "hf_subset": "determiner_noun_agreement_with_adj_irregular_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_left_branch_island_echo_question", "hf_repo": "blimp", "hf_subset": "left_branch_island_echo_question", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_questions_subject_gap_long_distance", "hf_repo": "blimp", "hf_subset": "wh_questions_subject_gap_long_distance", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_questions_object_gap", "hf_repo": "blimp", "hf_subset": "wh_questions_object_gap", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_regular_plural_subject_verb_agreement_1", "hf_repo": "blimp", "hf_subset": "regular_plural_subject_verb_agreement_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_coordinate_structure_constraint_complex_left_branch", "hf_repo": "blimp", "hf_subset": "coordinate_structure_constraint_complex_left_branch", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_adjunct_island", "hf_repo": "blimp", "hf_subset": "adjunct_island", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_existential_there_object_raising", "hf_repo": "blimp", "hf_subset": "existential_there_object_raising", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_wh_vs_that_no_gap_long_distance", "hf_repo": "blimp", "hf_subset": "wh_vs_that_no_gap_long_distance", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_existential_there_subject_raising", "hf_repo": "blimp", "hf_subset": "existential_there_subject_raising", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_passive_2", "hf_repo": "blimp", "hf_subset": "passive_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_causative", "hf_repo": "blimp", "hf_subset": "causative", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_superlative_quantifiers_1", "hf_repo": "blimp", "hf_subset": "superlative_quantifiers_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_sentential_subject_island", "hf_repo": "blimp", "hf_subset": "sentential_subject_island", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_tough_vs_raising_2", "hf_repo": "blimp", "hf_subset": "tough_vs_raising_2", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "blimp_irregular_plural_subject_verb_agreement_1", "hf_repo": "blimp", "hf_subset": "irregular_plural_subject_verb_agreement_1", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"],"metric": ["loglikelihood_acc", "loglikelihood_acc_norm"], "suite": ["helm", "blimp"], "prompt_function": "blimp_helm"} |
|
{"name": "bold", "hf_repo": "lighteval/bold_helm", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"} |
|
{"name": "bold_political_ideology", "hf_repo": "lighteval/bold_helm", "hf_subset": "political_ideology", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"} |
|
{"name": "bold_profession", "hf_repo": "lighteval/bold_helm", "hf_subset": "profession", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"} |
|
{"name": "bold_race", "hf_repo": "lighteval/bold_helm", "hf_subset": "race", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"} |
|
{"name": "bold_gender", "hf_repo": "lighteval/bold_helm", "hf_subset": "gender", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"} |
|
{"name": "bold_religious_ideology", "hf_repo": "lighteval/bold_helm", "hf_subset": "religious_ideology", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"} |
|
{"name": "boolq", "hf_repo": "lighteval/boolq_helm", "hf_subset": "default", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "boolq_helm"} |
|
{"name": "boolq_contrastset", "hf_repo": "lighteval/boolq_helm", "hf_subset": "default", "hf_avail_splits": ["validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "boolq_helm_contrastset"} |
|
{"name": "civil_comments_male", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "male", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments_female", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "female", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments_LGBTQ", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "LGBTQ", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments_christian", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "christian", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments_muslim", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "muslim", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments_other_religions", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "other_religions", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments_black", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "black", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments_white", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "white", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "civil_comments", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"} |
|
{"name": "dyck_language_2", "hf_repo": "lighteval/DyckLanguage", "hf_subset": "2", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match_indicator", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "dyck_language"} |
|
{"name": "dyck_language_3", "hf_repo": "lighteval/DyckLanguage", "hf_subset": "3", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match_indicator", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "dyck_language"} |
|
{"name": "dyck_language_4", "hf_repo": "lighteval/DyckLanguage", "hf_subset": "4", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match_indicator", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "dyck_language"} |
|
{"name": "humaneval", "hf_repo": "openai_humaneval", "hf_subset": "openai_humaneval", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": 600, "stop_sequence": ["\nclass", "\ndef", "\nif", "\nprint"], "metric": ["code_eval_he", "bias", "toxicity"], "suite": ["helm", "code_scenario"], "prompt_function": "humaneval"} |
|
{"name": "apps", "hf_repo": "codeparrot/apps", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 600, "stop_sequence": ["'''", "---", "\"\"\"", "\n\n\n"], "metric": ["code_eval_apps", "bias", "toxicity"], "suite": ["helm", "code_scenario"], "prompt_function": "apps"} |
|
{"name": "hellaswag", "hf_repo": "hellaswag", "hf_subset": "default", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "hellaswag_helm"} |
|
{"name": "openbookqa", "hf_repo": "openbookqa", "hf_subset": "main", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "openbookqa"} |
|
{"name": "commonsenseqa", "hf_repo": "commonsense_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "commonsense_qa"} |
|
{"name": "piqa", "hf_repo": "piqa", "hf_subset": "plain_text","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "piqa_helm"} |
|
{"name": "siqa", "hf_repo": "social_i_qa", "hf_subset": "default","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "siqa"} |
|
{"name": "copyright_pilot", "hf_repo": "lighteval/copyright_helm", "hf_subset": "pilot", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_n_books_1000-extractions_per_book_1-prefix_length_5", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_1-prefix_length_5", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_n_books_1000-extractions_per_book_1-prefix_length_25", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_1-prefix_length_25", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_n_books_1000-extractions_per_book_1-prefix_length_125", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_1-prefix_length_125", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_n_books_1000-extractions_per_book_3-prefix_length_5", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_3-prefix_length_5", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_n_books_1000-extractions_per_book_3-prefix_length_25", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_3-prefix_length_25", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_n_books_1000-extractions_per_book_3-prefix_length_125", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_3-prefix_length_125", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_popular_books-prefix_length_5", "hf_repo": "lighteval/copyright_helm", "hf_subset": "popular_books-prefix_length_5", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_popular_books-prefix_length_10", "hf_repo": "lighteval/copyright_helm", "hf_subset": "popular_books-prefix_length_10", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_popular_books-prefix_length_25", "hf_repo": "lighteval/copyright_helm", "hf_subset": "popular_books-prefix_length_25", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_popular_books-prefix_length_50", "hf_repo": "lighteval/copyright_helm", "hf_subset": "popular_books-prefix_length_50", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_popular_books-prefix_length_125", "hf_repo": "lighteval/copyright_helm", "hf_subset": "popular_books-prefix_length_125", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_popular_books-prefix_length_250", "hf_repo": "lighteval/copyright_helm", "hf_subset": "popular_books-prefix_length_250", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_oh_the_places", "hf_repo": "lighteval/copyright_helm", "hf_subset": "oh_the_places", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_prompt_num_line_1-min_lines_20", "hf_repo": "lighteval/copyright_helm", "hf_subset": "prompt_num_line_1-min_lines_20", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_prompt_num_line_5-min_lines_20", "hf_repo": "lighteval/copyright_helm", "hf_subset": "prompt_num_line_5-min_lines_20", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "copyright_prompt_num_line_10-min_lines_20", "hf_repo": "lighteval/copyright_helm", "hf_subset": "prompt_num_line_10-min_lines_20", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"} |
|
{"name": "covid_dialogue", "hf_repo": "lighteval/covid_dialogue", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "covid_dialogue"} |
|
{"name": "empathetic_dialogues", "hf_repo": "lighteval/empathetic_dialogues", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 50, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "empathetic_dialogue"} |
|
{"name": "disinformation_wedging", "hf_repo": "lighteval/disinformation", "hf_subset": "wedging", "hf_avail_splits": ["validation"], "evaluation_splits": ["validation"], "generation_size": 90, "stop_sequence": ["\nTweet", "\nReason"], "metric": ["disinformation", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "disinformation_wedging"} |
|
{"name": "disinformation_climate", "hf_repo": "lighteval/disinformation", "hf_subset": "reiteration_climate", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["disinformation", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "disinformation_reiteration"} |
|
{"name": "disinformation_covid", "hf_repo": "lighteval/disinformation", "hf_subset": "reiteration_covid", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["disinformation", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "disinformation_reiteration"} |
|
{"name": "entity_data_imputation_Buy", "hf_repo": "lighteval/Buy", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_data_imputation"} |
|
{"name": "entity_data_imputation_Restaurant", "hf_repo": "lighteval/Restaurant", "hf_subset": "default","hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_data_imputation"} |
|
{"name": "entity_matching_Beer", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Beer", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_iTunes_Amazon", "hf_repo": "lighteval/EntityMatching", "hf_subset": "iTunes_Amazon", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Fodors_Zagats", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Fodors_Zagats", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_DBLP_ACM", "hf_repo": "lighteval/EntityMatching", "hf_subset": "DBLP_ACM", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_DBLP_GoogleScholar", "hf_repo": "lighteval/EntityMatching", "hf_subset": "DBLP_GoogleScholar", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Amazon_Google", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Amazon_Google", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Walmart_Amazon", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Walmart_Amazon", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Abt_Buy", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Abt_Buy", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Company", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Company", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Dirty_iTunes_Amazon", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Dirty_iTunes_Amazon", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Dirty_DBLP_ACM", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Dirty_DBLP_ACM", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Dirty_DBLP_GoogleScholar", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Dirty_DBLP_GoogleScholar", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "entity_matching_Dirty_Walmart_Amazon", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Dirty_Walmart_Amazon", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"} |
|
{"name": "gsm8k", "hf_repo": "gsm8k", "hf_subset": "main", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 400, "stop_sequence": ["\n\n"], "metric": ["exact_match_indicator", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "gsm8k_helm"} |
|
{"name": "imdb", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "default","hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "imdb"} |
|
{"name": "imdb_contrastset", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "default","hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "imdb_contrastset"} |
|
{"name": "interactive_qa_mmlu_college_chemistry", "hf_repo": "cais/mmlu", "hf_subset": "college_chemistry", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_college_chemistry"} |
|
{"name": "interactive_qa_mmlu_global_facts", "hf_repo": "cais/mmlu", "hf_subset": "global_facts", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_global_facts"} |
|
{"name": "interactive_qa_mmlu_miscellaneous", "hf_repo": "cais/mmlu", "hf_subset": "miscellaneous", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_miscellaneous"} |
|
{"name": "interactive_qa_mmlu_nutrition", "hf_repo": "cais/mmlu", "hf_subset": "nutrition", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_nutrition"} |
|
{"name": "interactive_qa_mmlu_us_foreign_policy", "hf_repo": "cais/mmlu", "hf_subset": "us_foreign_policy", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_us_foreign_policy"} |
|
{"name": "legal_summarization_billsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "BillSum", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 1024, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"} |
|
{"name": "legal_summarization_eurlexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "EurLexSum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 2048, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"} |
|
{"name": "legal_summarization_multilexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "MultiLexSum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 256, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "multilexsum"} |
|
{"name": "legalsupport", "hf_repo": "lighteval/LegalSupport", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "legal_support"} |
|
{"name": "lexglue_ecthr_a", "hf_repo": "lighteval/lexglue", "hf_subset": "ecthr_a", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_ecthr_a"} |
|
{"name": "lexglue_ecthr_b", "hf_repo": "lighteval/lexglue", "hf_subset": "ecthr_b", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_ecthr_b"} |
|
{"name": "lexglue_scotus", "hf_repo": "lighteval/lexglue", "hf_subset": "scotus", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_scotus"} |
|
{"name": "lexglue_eurlex", "hf_repo": "lighteval/lexglue", "hf_subset": "eurlex", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_eurlex"} |
|
{"name": "lexglue_ledgar", "hf_repo": "lighteval/lexglue", "hf_subset": "ledgar", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_ledgar"} |
|
{"name": "lexglue_unfair_tos", "hf_repo": "lighteval/lexglue", "hf_subset": "unfair_tos", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_unfair_tos"} |
|
{"name": "lexglue_case_hold", "hf_repo": "lighteval/lexglue", "hf_subset": "case_hold", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_case_hold"} |
|
{"name": "lextreme_brazilian_court_decisions_judgment", "hf_repo": "lighteval/lextreme", "hf_subset": "brazilian_court_decisions_judgment", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_brazilian_court_decisions_judgment"} |
|
{"name": "lextreme_brazilian_court_decisions_unanimity", "hf_repo": "lighteval/lextreme", "hf_subset": "brazilian_court_decisions_unanimity", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_brazilian_court_decisions_unanimity"} |
|
{"name": "lextreme_german_argument_mining", "hf_repo": "lighteval/lextreme", "hf_subset": "german_argument_mining", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_german_argument_mining"} |
|
{"name": "lextreme_greek_legal_code_chapter", "hf_repo": "lighteval/lextreme", "hf_subset": "greek_legal_code_chapter", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_greek_legal_code_chapter"} |
|
{"name": "lextreme_greek_legal_code_subject", "hf_repo": "lighteval/lextreme", "hf_subset": "greek_legal_code_subject", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_greek_legal_code_subject"} |
|
{"name": "lextreme_greek_legal_code_volume", "hf_repo": "lighteval/lextreme", "hf_subset": "greek_legal_code_volume", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_greek_legal_code_volume"} |
|
{"name": "lextreme_swiss_judgment_prediction", "hf_repo": "lighteval/lextreme", "hf_subset": "swiss_judgment_prediction", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_swiss_judgment_prediction"} |
|
{"name": "lextreme_online_terms_of_service_unfairness_levels", "hf_repo": "lighteval/lextreme", "hf_subset": "online_terms_of_service_unfairness_levels", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 10, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_online_terms_of_service_unfairness_levels"} |
|
{"name": "lextreme_online_terms_of_service_clause_topics", "hf_repo": "lighteval/lextreme", "hf_subset": "online_terms_of_service_clause_topics", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 10, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_online_terms_of_service_clause_topics"} |
|
{"name": "lextreme_covid19_emergency_event", "hf_repo": "lighteval/lextreme", "hf_subset": "covid19_emergency_event", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 10, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_covid19_emergency_event"} |
|
{"name": "lextreme_multi_eurlex_level_1", "hf_repo": "lighteval/lextreme", "hf_subset": "multi_eurlex_level_1", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 10, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_multi_eurlex_level_1"} |
|
{"name": "lextreme_multi_eurlex_level_2", "hf_repo": "lighteval/lextreme", "hf_subset": "multi_eurlex_level_2", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 10, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_multi_eurlex_level_2"} |
|
{"name": "lextreme_multi_eurlex_level_3", "hf_repo": "lighteval/lextreme", "hf_subset": "multi_eurlex_level_3", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 10, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_multi_eurlex_level_3"} |
|
{"name": "lextreme_greek_legal_ner", "hf_repo": "lighteval/lextreme", "hf_subset": "greek_legal_ner", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 430, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_greek_legal_ner"} |
|
{"name": "lextreme_legalnero", "hf_repo": "lighteval/lextreme", "hf_subset": "legalnero", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 788, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_legalnero"} |
|
{"name": "lextreme_lener_br", "hf_repo": "lighteval/lextreme", "hf_subset": "lener_br", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 338, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_lener_br"} |
|
{"name": "lextreme_mapa_coarse", "hf_repo": "lighteval/lextreme", "hf_subset": "mapa_coarse", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 274, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_mapa_coarse"} |
|
{"name": "lextreme_mapa_fine", "hf_repo": "lighteval/lextreme", "hf_subset": "mapa_fine", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 274, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lextreme_scenario"], "prompt_function": "lextreme_mapa_fine"} |
|
{"name": "lsat_qa_grouping", "hf_repo": "lighteval/lsat_qa", "hf_subset": "grouping", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"} |
|
{"name": "lsat_qa_ordering", "hf_repo": "lighteval/lsat_qa", "hf_subset": "ordering", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"} |
|
{"name": "lsat_qa_assignment", "hf_repo": "lighteval/lsat_qa", "hf_subset": "assignment", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"} |
|
{"name": "lsat_qa_miscellaneous", "hf_repo": "lighteval/lsat_qa", "hf_subset": "miscellaneous", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"} |
|
{"name": "lsat_qa_all", "hf_repo": "lighteval/lsat_qa", "hf_subset": "all", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"} |
|
{"name": "me_q_sum", "hf_repo": "lighteval/me_q_sum", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence":["###"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "me_q_sum"} |
|
{"name": "med_dialog_healthcaremagic", "hf_repo": "lighteval/med_dialog", "hf_subset": "healthcaremagic", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_dialog"} |
|
{"name": "med_dialog_icliniq", "hf_repo": "lighteval/med_dialog", "hf_subset": "icliniq", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_dialog"} |
|
{"name": "med_mcqa", "hf_repo": "lighteval/med_mcqa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "med_mcqa"} |
|
{"name": "med_paragraph_simplification", "hf_repo": "lighteval/med_paragraph_simplification", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 512, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_paragraph_simplification"} |
|
{"name": "med_qa", "hf_repo": "bigbio/med_qa", "hf_subset": "med_qa_en_source", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "med_qa"} |
|
{"name": "mmlu", "hf_repo": "lighteval/mmlu", "hf_subset": "all","hf_avail_splits": ["auxiliary_train", "test", "validation", "dev"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "mmlu"} |
|
{"name": "ms_marco_regular", "hf_repo": "lighteval/ms_marco", "hf_subset": "regular", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["ranking"], "suite": ["helm"], "prompt_function": "ms_marco"} |
|
{"name": "ms_marco_trec", "hf_repo": "lighteval/ms_marco", "hf_subset": "trec", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["ranking"], "suite": ["helm"], "prompt_function": "ms_marco"} |
|
{"name": "narrativeqa", "hf_repo": "narrativeqa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "narrativeqa"} |
|
{"name": "numeracy_linear_example", "hf_repo": "lighteval/numeracy", "hf_subset": "linear_example", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "numeracy_linear_standard", "hf_repo": "lighteval/numeracy", "hf_subset": "linear_standard", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "numeracy_parabola_example", "hf_repo": "lighteval/numeracy", "hf_subset": "parabola_example", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "numeracy_parabola_standard", "hf_repo": "lighteval/numeracy", "hf_subset": "parabola_standard", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "numeracy_plane_example", "hf_repo": "lighteval/numeracy", "hf_subset": "plane_example", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "numeracy_plane_standard", "hf_repo": "lighteval/numeracy", "hf_subset": "plane_standard", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "numeracy_paraboloid_example", "hf_repo": "lighteval/numeracy", "hf_subset": "paraboloid_example", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "numeracy_paraboloid_standard", "hf_repo": "lighteval/numeracy", "hf_subset": "paraboloid_standard", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"} |
|
{"name": "pubmed_qa", "hf_repo": "pubmed_qa", "hf_subset": "pqa_labeled", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "pubmed_qa_helm"} |
|
{"name": "quac", "hf_repo": "quac", "hf_subset": "plain_text","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "quac"} |
|
{"name": "raft_ade_corpus_v2", "hf_repo": "ought/raft", "hf_subset": "ade_corpus_v2", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_banking_77", "hf_repo": "ought/raft", "hf_subset": "banking_77", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_neurips_impact_statement_risks", "hf_repo": "ought/raft", "hf_subset": "neurips_impact_statement_risks", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_one_stop_english", "hf_repo": "ought/raft", "hf_subset": "one_stop_english", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_overruling", "hf_repo": "ought/raft", "hf_subset": "overruling", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_semiconductor_org_types", "hf_repo": "ought/raft", "hf_subset": "semiconductor_org_types", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_systematic_review_inclusion", "hf_repo": "ought/raft", "hf_subset": "systematic_review_inclusion", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_tai_safety_research", "hf_repo": "ought/raft", "hf_subset": "tai_safety_research", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_terms_of_service", "hf_repo": "ought/raft", "hf_subset": "terms_of_service", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_tweet_eval_hate", "hf_repo": "ought/raft", "hf_subset": "tweet_eval_hate", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "raft_twitter_complaints", "hf_repo": "ought/raft", "hf_subset": "twitter_complaints", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"} |
|
{"name": "real_toxicity_prompts", "hf_repo": "allenai/real-toxicity-prompts", "hf_subset": "default","hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "real_toxicity_prompts"} |
|
{"name": "summarization_xsum", "hf_repo": "lighteval/summarization", "hf_subset": "xsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 64, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "xsum"} |
|
{"name": "summarization_xsum-sampled", "hf_repo": "lighteval/summarization", "hf_subset": "xsum-sampled", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 64, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "xsum"} |
|
{"name": "summarization_cnn-dm", "hf_repo": "lighteval/summarization", "hf_subset": "cnn-dm", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "cnn_dm"} |
|
{"name": "synthetic_reasoning_natural_easy", "hf_repo": "lighteval/synthetic_reasoning_natural", "hf_subset": "easy", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["f1_set_match", "iou_set_match", "exact_set_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "synthetic_reasoning_natural"} |
|
{"name": "synthetic_reasoning_natural_hard", "hf_repo": "lighteval/synthetic_reasoning_natural", "hf_subset": "hard", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["f1_set_match", "iou_set_match", "exact_set_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "synthetic_reasoning_natural"} |
|
{"name": "synthetic_reasoning_variable_substitution", "hf_repo": "lighteval/synthetic_reasoning", "hf_subset": "variable_substitution", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 50, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "synthetic_reasoning"} |
|
{"name": "synthetic_reasoning_pattern_match", "hf_repo": "lighteval/synthetic_reasoning", "hf_subset": "pattern_match", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 50, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "synthetic_reasoning"} |
|
{"name": "synthetic_reasoning_induction", "hf_repo": "lighteval/synthetic_reasoning", "hf_subset": "induction", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 50, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "synthetic_reasoning"} |
|
{"name": "the_pile_github", "hf_repo": "lighteval/pile_helm", "hf_subset": "github", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_arxiv", "hf_repo": "lighteval/pile_helm", "hf_subset": "arxiv", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_wikipedia", "hf_repo": "lighteval/pile_helm", "hf_subset": "wikipedia", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_opensubtitles", "hf_repo": "lighteval/pile_helm", "hf_subset": "opensubtitles", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_openwebtext2", "hf_repo": "lighteval/pile_helm", "hf_subset": "openwebtext2", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_gutenberg", "hf_repo": "lighteval/pile_helm", "hf_subset": "gutenberg", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_dm-mathematics", "hf_repo": "lighteval/pile_helm", "hf_subset": "dm-mathematics", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_enron", "hf_repo": "lighteval/pile_helm", "hf_subset": "enron", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_bibliotik", "hf_repo": "lighteval/pile_helm", "hf_subset": "bibliotik", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_pubmed-abstracts", "hf_repo": "lighteval/pile_helm", "hf_subset": "pubmed-abstracts", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_youtubesubtitles", "hf_repo": "lighteval/pile_helm", "hf_subset": "youtubesubtitles", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_hackernews", "hf_repo": "lighteval/pile_helm", "hf_subset": "hackernews", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_commoncrawl", "hf_repo": "lighteval/pile_helm", "hf_subset": "commoncrawl", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_europarl", "hf_repo": "lighteval/pile_helm", "hf_subset": "europarl", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_uspto", "hf_repo": "lighteval/pile_helm", "hf_subset": "uspto", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_freelaw", "hf_repo": "lighteval/pile_helm", "hf_subset": "freelaw", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_nih-exporter", "hf_repo": "lighteval/pile_helm", "hf_subset": "nih-exporter", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_stackexchange", "hf_repo": "lighteval/pile_helm", "hf_subset": "stackexchange", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "the_pile_pubmed-central", "hf_repo": "lighteval/pile_helm", "hf_subset": "pubmed-central", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"} |
|
{"name": "truthfulqa", "hf_repo": "lighteval/truthfulqa_helm", "hf_subset": "default","hf_avail_splits": ["train", "valid"], "evaluation_splits": ["valid"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "truthful_qa_helm"} |
|
{"name": "twitterAAE_aa", "hf_repo": "lighteval/twitterAAE", "hf_subset": "aa", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"} |
|
{"name": "twitterAAE_white", "hf_repo": "lighteval/twitterAAE", "hf_subset": "white", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"} |
|
{"name": "wikifact_genre", "hf_repo": "lighteval/wikifact", "hf_subset": "genre", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_instrument", "hf_repo": "lighteval/wikifact", "hf_subset": "instrument", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_author", "hf_repo": "lighteval/wikifact", "hf_subset": "author", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_creator", "hf_repo": "lighteval/wikifact", "hf_subset": "creator", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_composer", "hf_repo": "lighteval/wikifact", "hf_subset": "composer", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_director", "hf_repo": "lighteval/wikifact", "hf_subset": "director", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_applies_to_jurisdiction", "hf_repo": "lighteval/wikifact", "hf_subset": "applies_to_jurisdiction", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_overrules", "hf_repo": "lighteval/wikifact", "hf_subset": "overrules", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_laws_applied", "hf_repo": "lighteval/wikifact", "hf_subset": "laws_applied", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_repealed_by", "hf_repo": "lighteval/wikifact", "hf_subset": "repealed_by", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_majority_opinion_by", "hf_repo": "lighteval/wikifact", "hf_subset": "majority_opinion_by", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_plaintiff", "hf_repo": "lighteval/wikifact", "hf_subset": "plaintiff", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_defendant", "hf_repo": "lighteval/wikifact", "hf_subset": "defendant", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_movement", "hf_repo": "lighteval/wikifact", "hf_subset": "movement", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_influenced_by", "hf_repo": "lighteval/wikifact", "hf_subset": "influenced_by", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_member_of_political_party", "hf_repo": "lighteval/wikifact", "hf_subset": "member_of_political_party", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_diplomatic_relation", "hf_repo": "lighteval/wikifact", "hf_subset": "diplomatic_relation", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_position_held", "hf_repo": "lighteval/wikifact", "hf_subset": "position_held", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_basic_form_of_government", "hf_repo": "lighteval/wikifact", "hf_subset": "basic_form_of_government", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_office_held_by_head_of_state", "hf_repo": "lighteval/wikifact", "hf_subset": "office_held_by_head_of_state", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_head_of_state", "hf_repo": "lighteval/wikifact", "hf_subset": "head_of_state", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_office_held_by_head_of_government", "hf_repo": "lighteval/wikifact", "hf_subset": "office_held_by_head_of_government", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_head_of_government", "hf_repo": "lighteval/wikifact", "hf_subset": "head_of_government", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_shares_border_with", "hf_repo": "lighteval/wikifact", "hf_subset": "shares_border_with", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_continent", "hf_repo": "lighteval/wikifact", "hf_subset": "continent", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_capital", "hf_repo": "lighteval/wikifact", "hf_subset": "capital", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_capital_of", "hf_repo": "lighteval/wikifact", "hf_subset": "capital_of", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_country", "hf_repo": "lighteval/wikifact", "hf_subset": "country", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_twinned_administrative_body", "hf_repo": "lighteval/wikifact", "hf_subset": "twinned_administrative_body", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_currency", "hf_repo": "lighteval/wikifact", "hf_subset": "currency", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_central_bank", "hf_repo": "lighteval/wikifact", "hf_subset": "central_bank", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_subsidiary", "hf_repo": "lighteval/wikifact", "hf_subset": "subsidiary", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_stock_exchange", "hf_repo": "lighteval/wikifact", "hf_subset": "stock_exchange", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_industry", "hf_repo": "lighteval/wikifact", "hf_subset": "industry", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_statement_describes", "hf_repo": "lighteval/wikifact", "hf_subset": "statement_describes", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_solved_by", "hf_repo": "lighteval/wikifact", "hf_subset": "solved_by", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_programming_language", "hf_repo": "lighteval/wikifact", "hf_subset": "programming_language", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_file_extension", "hf_repo": "lighteval/wikifact", "hf_subset": "file_extension", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_number_of_processor_cores", "hf_repo": "lighteval/wikifact", "hf_subset": "number_of_processor_cores", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_operating_system", "hf_repo": "lighteval/wikifact", "hf_subset": "operating_system", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_measured_physical_quantity", "hf_repo": "lighteval/wikifact", "hf_subset": "measured_physical_quantity", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_recommended_unit_of_measurement", "hf_repo": "lighteval/wikifact", "hf_subset": "recommended_unit_of_measurement", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_atomic_number", "hf_repo": "lighteval/wikifact", "hf_subset": "atomic_number", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_electron_configuration", "hf_repo": "lighteval/wikifact", "hf_subset": "electron_configuration", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_discoverer_or_inventor", "hf_repo": "lighteval/wikifact", "hf_subset": "discoverer_or_inventor", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_time_of_discovery_or_invention", "hf_repo": "lighteval/wikifact", "hf_subset": "time_of_discovery_or_invention", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_location_of_discovery", "hf_repo": "lighteval/wikifact", "hf_subset": "location_of_discovery", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_medical_condition_treated", "hf_repo": "lighteval/wikifact", "hf_subset": "medical_condition_treated", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_drug_or_therapy_used_for_treatment", "hf_repo": "lighteval/wikifact", "hf_subset": "drug_or_therapy_used_for_treatment", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_genetic_association", "hf_repo": "lighteval/wikifact", "hf_subset": "genetic_association", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_therapeutic_area", "hf_repo": "lighteval/wikifact", "hf_subset": "therapeutic_area", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_symptoms_and_signs", "hf_repo": "lighteval/wikifact", "hf_subset": "symptoms_and_signs", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_place_of_birth", "hf_repo": "lighteval/wikifact", "hf_subset": "place_of_birth", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_place_of_death", "hf_repo": "lighteval/wikifact", "hf_subset": "place_of_death", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_subclass_of", "hf_repo": "lighteval/wikifact", "hf_subset": "subclass_of", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_official_language", "hf_repo": "lighteval/wikifact", "hf_subset": "official_language", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_position_played_on_team", "hf_repo": "lighteval/wikifact", "hf_subset": "position_played_on_team", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_member_of_sports_team", "hf_repo": "lighteval/wikifact", "hf_subset": "member_of_sports_team", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_award_received", "hf_repo": "lighteval/wikifact", "hf_subset": "award_received", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_original_network", "hf_repo": "lighteval/wikifact", "hf_subset": "original_network", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_educated_at", "hf_repo": "lighteval/wikifact", "hf_subset": "educated_at", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_named_after", "hf_repo": "lighteval/wikifact", "hf_subset": "named_after", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_original_language_of_film_or_TV_show", "hf_repo": "lighteval/wikifact", "hf_subset": "original_language_of_film_or_TV_show", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_member_of", "hf_repo": "lighteval/wikifact", "hf_subset": "member_of", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_field_of_work", "hf_repo": "lighteval/wikifact", "hf_subset": "field_of_work", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_participating_team", "hf_repo": "lighteval/wikifact", "hf_subset": "participating_team", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_occupation", "hf_repo": "lighteval/wikifact", "hf_subset": "occupation", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_has_part", "hf_repo": "lighteval/wikifact", "hf_subset": "has_part", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_manufacturer", "hf_repo": "lighteval/wikifact", "hf_subset": "manufacturer", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_developer", "hf_repo": "lighteval/wikifact", "hf_subset": "developer", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_country_of_citizenship", "hf_repo": "lighteval/wikifact", "hf_subset": "country_of_citizenship", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_language_of_work_or_name", "hf_repo": "lighteval/wikifact", "hf_subset": "language_of_work_or_name", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_located_in_the_administrative_territorial_entity", "hf_repo": "lighteval/wikifact", "hf_subset": "located_in_the_administrative_territorial_entity", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_languages_spoken_written_or_signed", "hf_repo": "lighteval/wikifact", "hf_subset": "languages_spoken_written_or_signed", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_employer", "hf_repo": "lighteval/wikifact", "hf_subset": "employer", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_record_label", "hf_repo": "lighteval/wikifact", "hf_subset": "record_label", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_location", "hf_repo": "lighteval/wikifact", "hf_subset": "location", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_work_location", "hf_repo": "lighteval/wikifact", "hf_subset": "work_location", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_religion", "hf_repo": "lighteval/wikifact", "hf_subset": "religion", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_owned_by", "hf_repo": "lighteval/wikifact", "hf_subset": "owned_by", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_native_language", "hf_repo": "lighteval/wikifact", "hf_subset": "native_language", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_instance_of", "hf_repo": "lighteval/wikifact", "hf_subset": "instance_of", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_country_of_origin", "hf_repo": "lighteval/wikifact", "hf_subset": "country_of_origin", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_headquarters_location", "hf_repo": "lighteval/wikifact", "hf_subset": "headquarters_location", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_location_of_formation", "hf_repo": "lighteval/wikifact", "hf_subset": "location_of_formation", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikifact_part_of", "hf_repo": "lighteval/wikifact", "hf_subset": "part_of", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"} |
|
{"name": "wikitext_103", "hf_repo": "lighteval/wikitext_103", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "wikitext_103"} |
|
{"name": "wmt_14_cs-en", "hf_repo": "lighteval/wmt_14", "hf_subset": "cs-en", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bleu"], "suite": ["helm"], "prompt_function": "wmt_14_cs_en"} |
|
{"name": "wmt_14_de-en", "hf_repo": "lighteval/wmt_14", "hf_subset": "de-en", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bleu"], "suite": ["helm"], "prompt_function": "wmt_14_de_en"} |
|
{"name": "wmt_14_fr-en", "hf_repo": "lighteval/wmt_14", "hf_subset": "fr-en", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bleu"], "suite": ["helm"], "prompt_function": "wmt_14_fr_en"} |
|
{"name": "wmt_14_hi-en", "hf_repo": "lighteval/wmt_14", "hf_subset": "hi-en", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bleu"], "suite": ["helm"], "prompt_function": "wmt_14_hi_en"} |
|
{"name": "wmt_14_ru-en", "hf_repo": "lighteval/wmt_14", "hf_subset": "ru-en", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bleu"], "suite": ["helm"], "prompt_function": "wmt_14_ru_en"} |