clefourrier (HF Staff) committed
Commit db5caec · 1 Parent(s): fe96616

Update train.jsonl

Files changed (1)
  1. train.jsonl +1 -1
train.jsonl CHANGED
@@ -254,7 +254,7 @@
  {"name": "the_pile_nih-exporter", "hf_repo": "lighteval/pile_helm", "hf_subset": "nih-exporter", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
  {"name": "the_pile_stackexchange", "hf_repo": "lighteval/pile_helm", "hf_subset": "stackexchange", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
  {"name": "the_pile_pubmed-central", "hf_repo": "lighteval/pile_helm", "hf_subset": "pubmed-central", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
- {"name": "truthfulqa", "hf_repo": "lighteval/truthfulqa_helm", "hf_subset": "default","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "truthful_qa_helm"}
+ {"name": "truthfulqa", "hf_repo": "lighteval/truthfulqa_helm", "hf_subset": "default","hf_avail_splits": ["train", "valid"], "evaluation_splits": ["valid"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "truthful_qa_helm"}
  {"name": "twitterAAE_aa", "hf_repo": "lighteval/twitterAAE", "hf_subset": "aa", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"}
  {"name": "twitterAAE_white", "hf_repo": "lighteval/twitterAAE", "hf_subset": "white", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"}
  {"name": "wikifact_genre", "hf_repo": "lighteval/wikifact", "hf_subset": "genre", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"}