{ "results": { "arc_challenge": { "acc,none": 0.3003412969283277, "acc_stderr,none": 0.013395909309957, "acc_norm,none": 0.3267918088737201, "acc_norm_stderr,none": 0.013706665975587336 }, "arc_easy": { "acc,none": 0.6485690235690236, "acc_stderr,none": 0.00979639558281772, "acc_norm,none": 0.5656565656565656, "acc_norm_stderr,none": 0.010170943451269428 }, "boolq": { "acc,none": 0.6467889908256881, "acc_stderr,none": 0.008359705247064303 }, "hellaswag": { "acc,none": 0.45160326628161723, "acc_stderr,none": 0.004966351835028203, "acc_norm,none": 0.5870344552877913, "acc_norm_stderr,none": 0.004913604782665858 }, "lambada_openai": { "perplexity,none": 4.911988887150992, "perplexity_stderr,none": 0.12302146370246485, "acc,none": 0.6343877352998254, "acc_stderr,none": 0.006709649590864073 }, "openbookqa": { "acc,none": 0.254, "acc_stderr,none": 0.01948659680164338, "acc_norm,none": 0.37, "acc_norm_stderr,none": 0.021613289165165785 }, "piqa": { "acc,none": 0.7448313384113167, "acc_stderr,none": 0.010171571592521822, "acc_norm,none": 0.7404787812840044, "acc_norm_stderr,none": 0.01022793988817392 }, "sciq": { "acc,none": 0.872, "acc_stderr,none": 0.010570133761108656, "acc_norm,none": 0.801, "acc_norm_stderr,none": 0.012631649083099182 }, "wikitext": { "word_perplexity,none": 20.722018295590285, "byte_perplexity,none": 1.6482397447214083, "bits_per_byte,none": 0.720926104999623 }, "winogrande": { "acc,none": 0.5887924230465666, "acc_stderr,none": 0.013829128358676862 } }, "configs": { "arc_challenge": { "task": "arc_challenge", "group": [ "ai2_arc", "multiple_choice" ], "dataset_path": "ai2_arc", "dataset_name": "ARC-Challenge", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Question: {{question}}\nAnswer:", "doc_to_target": "{{choices.label.index(answerKey)}}", "doc_to_choice": "{{choices.text}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "Question: {{question}}\nAnswer:" }, "arc_easy": { "task": "arc_easy", "group": [ "ai2_arc", "multiple_choice" ], "dataset_path": "ai2_arc", "dataset_name": "ARC-Easy", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Question: {{question}}\nAnswer:", "doc_to_target": "{{choices.label.index(answerKey)}}", "doc_to_choice": "{{choices.text}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "Question: {{question}}\nAnswer:" }, "boolq": { "task": "boolq", "group": [ "super-glue-lm-eval-v1" ], "dataset_path": "super_glue", "dataset_name": "boolq", "training_split": "train", "validation_split": "validation", "doc_to_text": "{{passage}}\nQuestion: {{question}}?\nAnswer:", "doc_to_target": "label", "doc_to_choice": [ "no", "yes" ], "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc" } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, 
"doc_to_decontamination_query": "passage" }, "hellaswag": { "task": "hellaswag", "group": [ "multiple_choice" ], "dataset_path": "hellaswag", "training_split": "train", "validation_split": "validation", "doc_to_text": "{% set text = activity_label ~ ': ' ~ ctx_a ~ ' ' ~ ctx_b.capitalize() %}{{text|trim|replace(' [title]', '. ')|regex_replace('\\[.*?\\]', '')|replace(' ', ' ')}}", "doc_to_target": "{{label}}", "doc_to_choice": "{{endings|map('trim')|map('replace', ' [title]', '. ')|map('regex_replace', '\\[.*?\\]', '')|map('replace', ' ', ' ')|list}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false }, "lambada_openai": { "task": "lambada_openai", "group": [ "lambada", "loglikelihood", "perplexity" ], "dataset_path": "EleutherAI/lambada_openai", "dataset_name": "default", "test_split": "test", "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}", "doc_to_target": "{{' '+text.split(' ')[-1]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "perplexity", "aggregation": "perplexity", "higher_is_better": false }, { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "loglikelihood", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{text}}" }, "openbookqa": { "task": "openbookqa", "group": [ "multiple_choice" ], "dataset_path": "openbookqa", "dataset_name": "main", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "question_stem", "doc_to_target": "{{choices.label.index(answerKey.lstrip())}}", "doc_to_choice": "{{choices.text}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "question_stem" }, "piqa": { "task": "piqa", "group": [ "multiple_choice" ], "dataset_path": "piqa", "training_split": "train", "validation_split": "validation", "doc_to_text": "Question: {{goal}}\nAnswer:", "doc_to_target": "label", "doc_to_choice": "{{[sol1, sol2]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "goal" }, "sciq": { "task": "sciq", "group": [ "multiple_choice" ], "dataset_path": "sciq", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "{{support.lstrip()}}\nQuestion: {{question}}\nAnswer:", "doc_to_target": 3, "doc_to_choice": "{{[distractor1, distractor2, distractor3, correct_answer]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", 
"repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{support}} {{question}}" }, "wikitext": { "task": "wikitext", "group": [ "perplexity", "loglikelihood_rolling" ], "dataset_path": "EleutherAI/wikitext_document_level", "dataset_name": "wikitext-2-raw-v1", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "", "doc_to_target": "", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "word_perplexity" }, { "metric": "byte_perplexity" }, { "metric": "bits_per_byte" } ], "output_type": "loglikelihood_rolling", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{page}}" }, "winogrande": { "task": "winogrande", "dataset_path": "winogrande", "dataset_name": "winogrande_xl", "training_split": "train", "validation_split": "validation", "doc_to_text": "", "doc_to_target": "", "doc_to_choice": "", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "sentence" } }, "versions": { "arc_challenge": "Yaml", "arc_easy": "Yaml", "boolq": "Yaml", "hellaswag": "Yaml", "lambada_openai": "Yaml", "openbookqa": "Yaml", "piqa": "Yaml", "sciq": "Yaml", "wikitext": "Yaml", "winogrande": "Yaml" }, "config": { "model": "hf", "model_args": "pretrained=lomahony/eleuther-pythia2.8b-hh-sft", "batch_size": "16", "batch_sizes": [], "device": null, "use_cache": null, "limit": null, "bootstrap_iters": 100000 }, "git_hash": "d1a44c8" }