Commit
·
45b35ea
1
Parent(s):
2650e16
removed test from commonsense qa
Browse files- train.jsonl +1 -1
train.jsonl
CHANGED
@@ -102,7 +102,7 @@
|
|
102 |
{"name": "apps", "hf_repo": "codeparrot/apps", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 600, "stop_sequence": ["'''", "---", "\"\"\"", "\n\n\n"], "metric": ["code_eval_apps", "bias", "toxicity"], "suite": ["helm", "code_scenario"], "prompt_function": "apps"}
|
103 |
{"name": "hellaswag", "hf_repo": "hellaswag", "hf_subset": "default", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "hellaswag_helm"}
|
104 |
{"name": "openbookqa", "hf_repo": "openbookqa", "hf_subset": "main", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "openbookqa"}
|
105 |
-
{"name": "commonsenseqa", "hf_repo": "commonsense_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "commonsense_qa"}
|
106 |
{"name": "piqa", "hf_repo": "piqa", "hf_subset": "plain_text","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "piqa_helm"}
|
107 |
{"name": "siqa", "hf_repo": "social_i_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "siqa"}
|
108 |
{"name": "copyright_pilot", "hf_repo": "lighteval/copyright_helm", "hf_subset": "pilot", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
|
|
|
102 |
{"name": "apps", "hf_repo": "codeparrot/apps", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 600, "stop_sequence": ["'''", "---", "\"\"\"", "\n\n\n"], "metric": ["code_eval_apps", "bias", "toxicity"], "suite": ["helm", "code_scenario"], "prompt_function": "apps"}
|
103 |
{"name": "hellaswag", "hf_repo": "hellaswag", "hf_subset": "default", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "hellaswag_helm"}
|
104 |
{"name": "openbookqa", "hf_repo": "openbookqa", "hf_subset": "main", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "openbookqa"}
|
105 |
+
{"name": "commonsenseqa", "hf_repo": "commonsense_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "commonsense_qa"}
|
106 |
{"name": "piqa", "hf_repo": "piqa", "hf_subset": "plain_text","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "piqa_helm"}
|
107 |
{"name": "siqa", "hf_repo": "social_i_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "siqa"}
|
108 |
{"name": "copyright_pilot", "hf_repo": "lighteval/copyright_helm", "hf_subset": "pilot", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
|