Commit 631590c
Parent(s): 0907486
mmlu name change

- train.jsonl +5 -5

train.jsonl CHANGED
@@ -145,11 +145,11 @@
{"name": "gsm8k", "hf_repo": "gsm8k", "hf_subset": "main", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 400, "stop_sequence": ["\n\n"], "metric": ["exact_match_indicator", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "gsm8k_helm"}
{"name": "imdb", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "default","hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "imdb"}
{"name": "imdb_contrastset", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "default","hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "imdb_contrastset"}
-{"name": "interactive_qa_mmlu_college_chemistry", "hf_repo": "
-{"name": "interactive_qa_mmlu_global_facts", "hf_repo": "
-{"name": "interactive_qa_mmlu_miscellaneous", "hf_repo": "
-{"name": "interactive_qa_mmlu_nutrition", "hf_repo": "
-{"name": "interactive_qa_mmlu_us_foreign_policy", "hf_repo": "
+{"name": "interactive_qa_mmlu_college_chemistry", "hf_repo": "cais/mmlu", "hf_subset": "college_chemistry", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_college_chemistry"}
+{"name": "interactive_qa_mmlu_global_facts", "hf_repo": "cais/mmlu", "hf_subset": "global_facts", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_global_facts"}
+{"name": "interactive_qa_mmlu_miscellaneous", "hf_repo": "cais/mmlu", "hf_subset": "miscellaneous", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_miscellaneous"}
+{"name": "interactive_qa_mmlu_nutrition", "hf_repo": "cais/mmlu", "hf_subset": "nutrition", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_nutrition"}
+{"name": "interactive_qa_mmlu_us_foreign_policy", "hf_repo": "cais/mmlu", "hf_subset": "us_foreign_policy", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_us_foreign_policy"}
{"name": "legal_summarization_billsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "billsum", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 1024, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
{"name": "legal_summarization_eurlexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "eurlexsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 2048, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
{"name": "legal_summarization_multilexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "multilexsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 256, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "multilexsum"}
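For reference, each train.jsonl entry only tells the evaluation harness which Hub dataset to pull, so the change above amounts to pointing the five interactive_qa_mmlu tasks at cais/mmlu. Below is a minimal, hypothetical sketch (not part of this commit) of how one of the updated entries could be resolved with the standard `datasets` library; the entry literal is trimmed to the loading-related fields of the new line 148, and the loading logic is an assumption for illustration, not the project's actual loader.

```python
import json

from datasets import load_dataset  # assumes the `datasets` library is installed

# Trimmed copy of the updated line-148 entry (loading-related fields only).
entry = json.loads(
    '{"name": "interactive_qa_mmlu_college_chemistry", '
    '"hf_repo": "cais/mmlu", "hf_subset": "college_chemistry", '
    '"hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"]}'
)

# Pull only the split(s) the entry marks for evaluation.
for split in entry["evaluation_splits"]:
    ds = load_dataset(entry["hf_repo"], entry["hf_subset"], split=split)
    print(f'{entry["name"]}: {len(ds)} examples in split "{split}"')
```

The remaining fields in each entry (generation_size, stop_sequence, metric, suite, prompt_function) configure generation and scoring rather than dataset lookup, so they are not exercised in this sketch.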