clefourrier (HF Staff) committed
Commit fe96616 · 1 Parent(s): 74c7b84

Update train.jsonl

Files changed (1)
  1. train.jsonl +0 -3
train.jsonl CHANGED
@@ -254,9 +254,6 @@
  {"name": "the_pile_nih-exporter", "hf_repo": "lighteval/pile_helm", "hf_subset": "nih-exporter", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
  {"name": "the_pile_stackexchange", "hf_repo": "lighteval/pile_helm", "hf_subset": "stackexchange", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
  {"name": "the_pile_pubmed-central", "hf_repo": "lighteval/pile_helm", "hf_subset": "pubmed-central", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
- {"name": "the_pile_ubuntu-irc", "hf_repo": "lighteval/pile_helm", "hf_subset": "ubuntu-irc", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
- {"name": "the_pile_bookcorpus", "hf_repo": "lighteval/pile_helm", "hf_subset": "bookcorpus", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
- {"name": "the_pile_philpapers", "hf_repo": "lighteval/pile_helm", "hf_subset": "philpapers", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
  {"name": "truthfulqa", "hf_repo": "lighteval/truthfulqa_helm", "hf_subset": "default","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "truthful_qa_helm"}
  {"name": "twitterAAE_aa", "hf_repo": "lighteval/twitterAAE", "hf_subset": "aa", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"}
  {"name": "twitterAAE_white", "hf_repo": "lighteval/twitterAAE", "hf_subset": "white", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"}
 