clefourrier committed
Commit bf7fe4d · Parent(s): 631590c

legal summarization dataset keys

Files changed (1): train.jsonl (+3 -3)
train.jsonl CHANGED
@@ -150,9 +150,9 @@
  {"name": "interactive_qa_mmlu_miscellaneous", "hf_repo": "cais/mmlu", "hf_subset": "miscellaneous", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_miscellaneous"}
  {"name": "interactive_qa_mmlu_nutrition", "hf_repo": "cais/mmlu", "hf_subset": "nutrition", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_nutrition"}
  {"name": "interactive_qa_mmlu_us_foreign_policy", "hf_repo": "cais/mmlu", "hf_subset": "us_foreign_policy", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_us_foreign_policy"}
- {"name": "legal_summarization_billsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "billsum", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 1024, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
- {"name": "legal_summarization_eurlexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "eurlexsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 2048, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
- {"name": "legal_summarization_multilexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "multilexsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 256, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "multilexsum"}
+ {"name": "legal_summarization_billsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "BillSum", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 1024, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
+ {"name": "legal_summarization_eurlexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "EurLexSum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 2048, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
+ {"name": "legal_summarization_multilexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "MultiLexSum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 256, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "multilexsum"}
  {"name": "legalsupport", "hf_repo": "lighteval/LegalSupport", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "legal_support"}
  {"name": "lexglue_ecthr_a", "hf_repo": "lighteval/lexglue", "hf_subset": "ecthr_a", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_ecthr_a"}
  {"name": "lexglue_ecthr_b", "hf_repo": "lighteval/lexglue", "hf_subset": "ecthr_b", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_ecthr_b"}