clefourrier (HF Staff) committed
Commit 2650e16 · Parent: bb9b23a

removed math, so slow to load

Files changed (1):
  1. train.jsonl +0 -7
train.jsonl CHANGED
@@ -184,13 +184,6 @@
  {"name": "lsat_qa_assignment", "hf_repo": "lighteval/lsat_qa", "hf_subset": "assignment", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"}
  {"name": "lsat_qa_miscellaneous", "hf_repo": "lighteval/lsat_qa", "hf_subset": "miscellaneous", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"}
  {"name": "lsat_qa_all", "hf_repo": "lighteval/lsat_qa", "hf_subset": "all", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "lsat_qa_scenario"], "prompt_function": "lsat_qa"}
- {"name": "math_algebra", "hf_repo": "lighteval/math", "hf_subset": "algebra", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
- {"name": "math_counting_and_probability", "hf_repo": "lighteval/math", "hf_subset": "counting_and_probability", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
- {"name": "math_geometry", "hf_repo": "lighteval/math", "hf_subset": "geometry", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
- {"name": "math_intermediate_algebra", "hf_repo": "lighteval/math", "hf_subset": "intermediate_algebra", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
- {"name": "math_number_theory", "hf_repo": "lighteval/math", "hf_subset": "number_theory", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
- {"name": "math_prealgebra", "hf_repo": "lighteval/math", "hf_subset": "prealgebra", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
- {"name": "math_precalculus", "hf_repo": "lighteval/math", "hf_subset": "precalculus", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
  {"name": "me_q_sum", "hf_repo": "lighteval/me_q_sum", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence":["###"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "me_q_sum"}
  {"name": "med_dialog_healthcaremagic", "hf_repo": "lighteval/med_dialog", "hf_subset": "healthcaremagic", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_dialog"}
  {"name": "med_dialog_icliniq", "hf_repo": "lighteval/med_dialog", "hf_subset": "icliniq", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_dialog"}
 