clefourrier (HF Staff) committed
Commit 9ca6b68 · Parent: 45b35ea

removed test from social_i_qa
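
The diff below drops the test split from the siqa entry; presumably the social_i_qa repo on the Hub only serves train and validation splits. A minimal sketch, assuming the `datasets` library, for checking which splits a Hub dataset actually exposes before listing them in hf_avail_splits:

from datasets import get_dataset_split_names

# Ask the Hub which splits "social_i_qa" actually serves; the expected
# output here (['train', 'validation']) is an assumption to verify.
print(get_dataset_split_names("social_i_qa"))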

Files changed (1): train.jsonl (+1, -1)
train.jsonl CHANGED
@@ -104,7 +104,7 @@
  {"name": "openbookqa", "hf_repo": "openbookqa", "hf_subset": "main", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "openbookqa"}
  {"name": "commonsenseqa", "hf_repo": "commonsense_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "commonsense_qa"}
  {"name": "piqa", "hf_repo": "piqa", "hf_subset": "plain_text","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "piqa_helm"}
- {"name": "siqa", "hf_repo": "social_i_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "siqa"}
+ {"name": "siqa", "hf_repo": "social_i_qa", "hf_subset": "default","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "siqa"}
  {"name": "copyright_pilot", "hf_repo": "lighteval/copyright_helm", "hf_subset": "pilot", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
  {"name": "copyright_n_books_1000-extractions_per_book_1-prefix_length_5", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_1-prefix_length_5", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
  {"name": "copyright_n_books_1000-extractions_per_book_1-prefix_length_25", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_1-prefix_length_25", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}