| Column | Type (range across dataset) | Value in this row |
|---|---|---|
| datasetId | string (length 2–117) | open-llm-leaderboard/details_TheBloke__Chinese-Alpaca-33B-SuperHOT-8K-fp16 |
| author | string (length 2–42) | open-llm-leaderboard |
| last_modified | timestamp[us, tz=UTC] | 2023-08-27T12:33:26Z |
| downloads | int64 (0–9.36M) | 201 |
| likes | int64 (0–3.89k) | 0 |
| tags | list | ["region:us"] |
| task_categories | list | null |
| createdAt | timestamp[us, tz=UTC] | 2023-08-18T11:24:58Z |
| card | string (length 19–1.01M) | raw dataset card, reproduced below |
| embedding | list | float vector, reproduced (truncated) at the end of the row |
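The `card` value below is stored as a single string: a `---`-delimited YAML front-matter block followed by the Markdown body. A minimal sketch of splitting one such string into metadata and body, assuming PyYAML is available (the `split_card` helper and the `row` variable are illustrative, not part of any library):

```python
import yaml  # PyYAML


def split_card(card_text: str):
    """Split a raw card string into (front_matter_dict, markdown_body).

    Assumes the card starts with a '---' ... '---' YAML block,
    as the `card` value reproduced below does.
    """
    _, front_matter, body = card_text.split("---", 2)
    return yaml.safe_load(front_matter), body.strip()


# Example (hypothetical variable `row` holding one record of this dataset):
# meta, body = split_card(row["card"])
# print(meta["pretty_name"], len(meta["configs"]))
```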
--- pretty_name: Evaluation run of TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__Chinese-Alpaca-33B-SuperHOT-8K-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-31T19:21:09.032023](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Chinese-Alpaca-33B-SuperHOT-8K-fp16/blob/main/results_2023-07-31T19%3A21%3A09.032023.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24079112101610886,\n\ \ \"acc_stderr\": 0.030961801782247226,\n \"acc_norm\": 0.24208994950215265,\n\ \ \"acc_norm_stderr\": 0.03097894827141845,\n \"mc1\": 0.23378212974296206,\n\ \ \"mc1_stderr\": 0.014816195991931588,\n \"mc2\": 0.4774590793334822,\n\ \ \"mc2_stderr\": 0.01691343346185639\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.2175767918088737,\n \"acc_stderr\": 0.0120572620209725,\n\ \ \"acc_norm\": 0.26791808873720135,\n \"acc_norm_stderr\": 0.012942030195136426\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.26926906990639315,\n\ \ \"acc_stderr\": 0.004426734718808876,\n \"acc_norm\": 0.29555865365465045,\n\ \ \"acc_norm_stderr\": 0.004553609405747228\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.035914440841969694,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.035914440841969694\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\ \ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.31,\n\ \ \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.2188679245283019,\n \"acc_stderr\": 0.025447863825108608,\n\ \ \"acc_norm\": 0.2188679245283019,\n \"acc_norm_stderr\": 0.025447863825108608\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.03476590104304134,\n 
\"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.15,\n\ \ \"acc_stderr\": 0.03588702812826372,\n \"acc_norm\": 0.15,\n \ \ \"acc_norm_stderr\": 0.03588702812826372\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n\ \ \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.0414243971948936,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.0414243971948936\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n\ \ \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2275132275132275,\n \"acc_stderr\": 0.021591269407823778,\n \"\ acc_norm\": 0.2275132275132275,\n \"acc_norm_stderr\": 0.021591269407823778\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.037184890068181146,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.037184890068181146\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.25161290322580643,\n \"acc_stderr\": 0.02468597928623997,\n \"\ acc_norm\": 0.25161290322580643,\n \"acc_norm_stderr\": 0.02468597928623997\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.23645320197044334,\n \"acc_stderr\": 0.029896114291733552,\n \"\ acc_norm\": 0.23645320197044334,\n \"acc_norm_stderr\": 0.029896114291733552\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\"\ : 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.0347769116216366,\n\ \ \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.0347769116216366\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.21212121212121213,\n \"acc_stderr\": 0.029126522834586804,\n \"\ acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 
0.029126522834586804\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.21761658031088082,\n \"acc_stderr\": 0.02977866303775296,\n\ \ \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.02977866303775296\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.2128205128205128,\n \"acc_stderr\": 0.020752423722128013,\n\ \ \"acc_norm\": 0.2128205128205128,\n \"acc_norm_stderr\": 0.020752423722128013\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073828,\n \ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073828\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n\ \ \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"\ acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936087,\n \"\ acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936087\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\"\ : 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n\ \ \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n\ \ \"acc_stderr\": 0.03058759135160425,\n \"acc_norm\": 0.2549019607843137,\n\ \ \"acc_norm_stderr\": 0.03058759135160425\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.28270042194092826,\n \"acc_stderr\": 0.02931281415395592,\n\ \ \"acc_norm\": 0.28270042194092826,\n \"acc_norm_stderr\": 0.02931281415395592\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.28699551569506726,\n\ \ \"acc_stderr\": 0.030360379710291947,\n \"acc_norm\": 0.28699551569506726,\n\ \ \"acc_norm_stderr\": 0.030360379710291947\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\ acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n\ \ \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \ \ \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n\ \ \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\ \ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\ \ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.29914529914529914,\n\ \ \"acc_stderr\": 
0.029996951858349497,\n \"acc_norm\": 0.29914529914529914,\n\ \ \"acc_norm_stderr\": 0.029996951858349497\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.26947637292464877,\n\ \ \"acc_stderr\": 0.01586624307321506,\n \"acc_norm\": 0.26947637292464877,\n\ \ \"acc_norm_stderr\": 0.01586624307321506\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.26878612716763006,\n \"acc_stderr\": 0.023868003262500114,\n\ \ \"acc_norm\": 0.26878612716763006,\n \"acc_norm_stderr\": 0.023868003262500114\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\ \ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\ \ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.02355083135199509,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.02355083135199509\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\ \ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\ \ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n\ \ \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.22340425531914893,\n \"acc_stderr\": 0.024847921358063962,\n \ \ \"acc_norm\": 0.22340425531914893,\n \"acc_norm_stderr\": 0.024847921358063962\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\ \ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\ \ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.024562204314142314,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.024562204314142314\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n\ \ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.20408163265306123,\n\ \ \"acc_stderr\": 0.02580128347509051,\n \"acc_norm\": 0.20408163265306123,\n\ \ \"acc_norm_stderr\": 0.02580128347509051\n },\n \"harness|hendrycksTest-sociology|5\"\ : {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\ \ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\ \ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\ \ 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n\ \ \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.26506024096385544,\n \"acc_stderr\": 0.03436024037944967,\n\ \ \"acc_norm\": 0.26506024096385544,\n \"acc_norm_stderr\": 0.03436024037944967\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n\ \ \"acc_stderr\": 0.03582529442573122,\n 
\"acc_norm\": 0.3216374269005848,\n\ \ \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.23378212974296206,\n \"mc1_stderr\": 0.014816195991931588,\n\ \ \"mc2\": 0.4774590793334822,\n \"mc2_stderr\": 0.01691343346185639\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|arc:challenge|25_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hellaswag|10_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:21:09.032023.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:21:09.032023.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:21:09.032023.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:21:09.032023.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:21:09.032023.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:21:09.032023.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T19_21_09.032023 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T19:21:09.032023.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T19:21:09.032023.parquet' - config_name: results data_files: - split: 2023_07_31T19_21_09.032023 path: - results_2023-07-31T19:21:09.032023.parquet - split: latest path: - results_2023-07-31T19:21:09.032023.parquet --- # Dataset Card for Evaluation run of TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
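For example, a minimal sketch of pulling those aggregated results through the `results` configuration, using the `latest` split listed in the configuration block above (the exact layout of the returned rows is not documented here, so the print is just for inspection):

```python
from datasets import load_dataset

# Aggregated metrics for the run: the "results" configuration,
# with the "latest" split pointing at the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__Chinese-Alpaca-33B-SuperHOT-8K-fp16",
    "results",
    split="latest",
)
print(results[0])
```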
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__Chinese-Alpaca-33B-SuperHOT-8K-fp16", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-31T19:21:09.032023](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Chinese-Alpaca-33B-SuperHOT-8K-fp16/blob/main/results_2023-07-31T19%3A21%3A09.032023.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24079112101610886, "acc_stderr": 0.030961801782247226, "acc_norm": 0.24208994950215265, "acc_norm_stderr": 0.03097894827141845, "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931588, "mc2": 0.4774590793334822, "mc2_stderr": 0.01691343346185639 }, "harness|arc:challenge|25": { "acc": 0.2175767918088737, "acc_stderr": 0.0120572620209725, "acc_norm": 0.26791808873720135, "acc_norm_stderr": 0.012942030195136426 }, "harness|hellaswag|10": { "acc": 0.26926906990639315, "acc_stderr": 0.004426734718808876, "acc_norm": 0.29555865365465045, "acc_norm_stderr": 0.004553609405747228 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2222222222222222, "acc_stderr": 0.035914440841969694, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.035914440841969694 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2188679245283019, "acc_stderr": 0.025447863825108608, "acc_norm": 0.2188679245283019, "acc_norm_stderr": 0.025447863825108608 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.15, "acc_stderr": 0.03588702812826372, "acc_norm": 0.15, "acc_norm_stderr": 0.03588702812826372 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, 
"harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.0414243971948936, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2275132275132275, "acc_stderr": 0.021591269407823778, "acc_norm": 0.2275132275132275, "acc_norm_stderr": 0.021591269407823778 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2222222222222222, "acc_stderr": 0.037184890068181146, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.037184890068181146 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.02468597928623997, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.02468597928623997 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.23645320197044334, "acc_stderr": 0.029896114291733552, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.029896114291733552 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.19, "acc_stderr": 0.039427724440366234, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2727272727272727, "acc_stderr": 0.0347769116216366, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.0347769116216366 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.21212121212121213, "acc_stderr": 0.029126522834586804, "acc_norm": 0.21212121212121213, "acc_norm_stderr": 0.029126522834586804 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.02977866303775296, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.02977866303775296 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2128205128205128, "acc_stderr": 0.020752423722128013, "acc_norm": 0.2128205128205128, "acc_norm_stderr": 0.020752423722128013 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073828, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073828 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936087, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936087 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.03058759135160425, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.03058759135160425 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.28270042194092826, "acc_stderr": 0.02931281415395592, "acc_norm": 0.28270042194092826, "acc_norm_stderr": 0.02931281415395592 }, 
"harness|hendrycksTest-human_aging|5": { "acc": 0.28699551569506726, "acc_stderr": 0.030360379710291947, "acc_norm": 0.28699551569506726, "acc_norm_stderr": 0.030360379710291947 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.29914529914529914, "acc_stderr": 0.029996951858349497, "acc_norm": 0.29914529914529914, "acc_norm_stderr": 0.029996951858349497 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.26947637292464877, "acc_stderr": 0.01586624307321506, "acc_norm": 0.26947637292464877, "acc_norm_stderr": 0.01586624307321506 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.26878612716763006, "acc_stderr": 0.023868003262500114, "acc_norm": 0.26878612716763006, "acc_norm_stderr": 0.023868003262500114 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.21568627450980393, "acc_stderr": 0.02355083135199509, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.02355083135199509 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.22340425531914893, "acc_stderr": 0.024847921358063962, "acc_norm": 0.22340425531914893, "acc_norm_stderr": 0.024847921358063962 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.20588235294117646, "acc_stderr": 0.024562204314142314, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.024562204314142314 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.20408163265306123, "acc_stderr": 0.02580128347509051, "acc_norm": 0.20408163265306123, "acc_norm_stderr": 0.02580128347509051 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-virology|5": { "acc": 0.26506024096385544, "acc_stderr": 0.03436024037944967, "acc_norm": 0.26506024096385544, "acc_norm_stderr": 0.03436024037944967 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931588, "mc2": 0.4774590793334822, "mc2_stderr": 0.01691343346185639 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7194718718528748, -0.8412551879882812, 0.24766066670417786, 0.21153834462165833, -0.1816261261701584, -0.04314514994621277, 0.039339564740657806, -0.27376335859298706, 0.5904322862625122, -0.09283187240362167, -0.5150615572929382, -0.6776431202888489, -0.4499250650405884, 0.22389984130859375, -0.03386631980538368, 0.8420403599739075, -0.19244155287742615, -0.13331080973148346, 0.1481592059135437, -0.06676600873470306, -0.2689374089241028, -0.30627548694610596, -0.5069376230239868, -0.35962116718292236, 0.17651347815990448, 0.4156962037086487, 0.4831004738807678, 0.8193359375, 0.705541729927063, 0.29481953382492065, -0.330518513917923, 0.01252034492790699, -0.1542946845293045, -0.29539600014686584, 0.3760501742362976, -0.3572058379650116, -0.8127644658088684, 0.2828117609024048, 0.7410832047462463, 0.5945356488227844, -0.08737020939588547, 0.29242056608200073, 0.016545306891202927, 0.537377655506134, -0.3721282184123993, 0.05662548169493675, -0.2941277027130127, 0.25135087966918945, -0.20646913349628448, -0.28092101216316223, -0.27510228753089905, -0.21300287544727325, -0.15361635386943817, -0.91209876537323, 0.2900328040122986, 0.31875571608543396, 1.6060383319854736, -0.1796521246433258, -0.1802719682455063, 0.0660669207572937, -0.11230549216270447, 1.0237603187561035, -0.8730648159980774, 0.3854806125164032, 0.7776153683662415, 0.11070387810468674, -0.19247134029865265, -0.545982837677002, -0.6658632755279541, 0.06816564500331879, -0.3769197165966034, 0.380048006772995, -0.04625657573342323, -0.19420212507247925, 0.35463473200798035, 0.6614378094673157, -0.6694635152816772, 0.168125182390213, -0.6385994553565979, -0.20312713086605072, 1.0772945880889893, 0.3067466914653778, 0.1175244152545929, -0.348149836063385, -0.684440553188324, -0.6445251107215881, -0.43034839630126953, 0.2733248174190521, 0.40647074580192566, 0.35389697551727295, -0.3958284854888916, 0.6775332093238831, -0.40705281496047974, 0.569469690322876, 0.39861559867858887, 0.004537451080977917, 0.8803143501281738, -0.6866192817687988, -0.5244519114494324, -0.07303039729595184, 1.1095095872879028, 0.6012269854545593, 0.04033426195383072, 0.22526653110980988, 0.023740844801068306, -0.10261845588684082, -0.03101450391113758, -0.8739649057388306, -0.2874275743961334, 0.18900330364704132, -0.39590418338775635, -0.4425233006477356, 0.3148864209651947, -0.8797469139099121, 0.1604018360376358, -0.008342220447957516, 0.4130138158798218, -0.5368324518203735, -0.11683820188045502, 0.22872452437877655, -0.41022995114326477, 0.8606594800949097, -0.17279741168022156, -0.8274371027946472, 0.3769993782043457, 0.5026636719703674, 0.7837579846382141, -0.07258247584104538, -0.47358372807502747, -0.10105711221694946, -0.07936551421880722, -0.31992268562316895, 0.5513195991516113, -0.2686038315296173, -0.43944159150123596, -0.29243117570877075, 0.32532626390457153, -0.2572125196456909, -0.35544300079345703, 0.716325044631958, -0.2464151680469513, 0.1850689947605133, -0.43368151783943176, -0.6437671780586243, 0.14789238572120667, 0.40102511644363403, -0.40919533371925354, 1.2777190208435059, 0.24585328996181488, -0.8011730313301086, 0.45908066630363464, -0.5938467979431152, -0.15665118396282196, 0.01732218638062477, -0.06894003599882126, -0.7923576235771179, -0.280651718378067, 0.1998538076877594, 0.4158577024936676, -0.12185154855251312, -0.15484564006328583, -0.37148866057395935, -0.38873904943466187, 0.3105977177619934, -0.14418858289718628, 1.2303476333618164, -0.026037409901618958, -0.7611472010612488, -0.12396983802318573, 
-1.2347619533538818, 0.2718063294887543, 0.24418288469314575, -0.35736843943595886, -0.18429137766361237, -0.503061830997467, -0.04760532081127167, 0.16458292305469513, 0.27440202236175537, -0.8517910242080688, 0.2895185947418213, -0.35153892636299133, 0.1622786521911621, 1.2510250806808472, 0.02361481823027134, 0.15062987804412842, -0.5261305570602417, 0.5239731073379517, 0.19584234058856964, 0.2090906947851181, 0.3372012674808502, -0.6067196726799011, -0.7955901026725769, -0.4943958818912506, -0.05245966836810112, 0.6009482741355896, -0.23117898404598236, 1.1218198537826538, 0.09990860521793365, -0.8906766772270203, -0.47014644742012024, -0.1312316507101059, 0.5144333839416504, 0.7846803665161133, 0.6480097770690918, -0.045956309884786606, -0.608273983001709, -1.107629656791687, -0.27538955211639404, -0.15684308111667633, 0.1516733169555664, 0.195160910487175, 1.0197808742523193, -0.2550443112850189, 0.5759656429290771, -1.024177074432373, -0.2257642298936844, 0.13383297622203827, -0.05872916057705879, 0.773443341255188, 0.771066427230835, 0.5945173501968384, -0.6657070517539978, -0.514515221118927, 0.211433544754982, -0.8876961469650269, -0.0875372663140297, 0.10242050886154175, -0.32125312089920044, 0.1502818614244461, 0.14949840307235718, -0.7314441800117493, 0.5325731039047241, 0.2198493778705597, -1.0464352369308472, 1.0560686588287354, -0.3396700918674469, 0.5789640545845032, -0.974351704120636, 0.17142422497272491, -0.028651900589466095, 0.07621578872203827, -0.5040311813354492, 0.08939327299594879, 0.11875129491090775, 0.4416899085044861, -0.5394405126571655, 0.8098992109298706, -0.6919957399368286, -0.06896261870861053, 0.4295160174369812, 0.09900286793708801, -0.08745601773262024, 0.3784865438938141, -0.19234353303909302, 0.7998213171958923, 0.7401009202003479, -0.4532686173915863, 0.5206645727157593, 0.42577946186065674, -0.2296569049358368, 0.6918514370918274, -0.49699610471725464, -0.30908697843551636, 0.2937544882297516, -0.0033881282433867455, -0.8058453798294067, -0.47272688150405884, 0.05062862113118172, -0.6066953539848328, -0.09524155408143997, 0.40018555521965027, -0.27054986357688904, -0.820466935634613, -0.9551785588264465, 0.3549429178237915, 0.6565450429916382, -0.44113513827323914, -0.16198955476284027, 0.03587702289223671, 0.09585385769605637, -0.8226374983787537, -0.8613020777702332, -0.4937213659286499, -0.19004590809345245, -0.6622427105903625, 0.33202826976776123, -0.2809942960739136, -0.22933641076087952, -0.09489963948726654, -0.20958563685417175, -0.3068951368331909, -0.009182566776871681, 0.10610789805650711, 0.7160519361495972, -0.38586530089378357, -0.30546480417251587, -0.24453723430633545, -0.17089101672172546, 0.19069986045360565, -0.08055619150400162, 0.39857786893844604, -0.45137161016464233, -0.3831886947154999, -0.47007060050964355, -0.00015847758913878351, 0.6853935122489929, -0.09661408513784409, 0.7620908617973328, 0.44852080941200256, -0.31444287300109863, -0.006253852043300867, -0.27861008048057556, -0.26141828298568726, -0.5812301635742188, 0.29676997661590576, -0.5096989870071411, -1.0347999334335327, 0.82623291015625, 0.5848978757858276, 0.08643548935651779, 1.123465657234192, 0.5961479544639587, -0.30912676453590393, 1.0448064804077148, 0.049276985228061676, 0.3306744396686554, 0.4006279706954956, -0.7260558605194092, 0.11773271858692169, -0.9326660633087158, -0.33664125204086304, -0.5724575519561768, -0.4809824824333191, -0.7012487649917603, -0.07709446549415588, 0.25061818957328796, 0.15576708316802979, -0.6920375227928162, 
0.6130332350730896, -0.8443464636802673, 0.6031493544578552, 0.5909368991851807, 0.27230846881866455, 0.16355764865875244, -0.18041285872459412, -0.3832028806209564, -0.1204427182674408, -0.4672076404094696, -0.2073170691728592, 1.2034721374511719, 0.2723589539527893, 0.7002419233322144, 0.05776034668087959, 0.8782728314399719, 0.0745527520775795, -0.04544089362025261, -0.6056538224220276, 0.6743372082710266, 0.08985549956560135, -0.7958080768585205, -0.42425915598869324, -0.4854455888271332, -1.1086152791976929, 0.38397446274757385, -0.1524127572774887, -0.8471808433532715, 0.13159064948558807, 0.04352119192481041, -0.23133766651153564, 0.48763778805732727, -0.5414665937423706, 0.850673496723175, -0.1369999498128891, -0.4661204218864441, 0.12548865377902985, -0.8427247405052185, 0.4616847336292267, 0.19336532056331635, 0.2685098350048065, 0.041812945157289505, 0.2561130225658417, 1.1945626735687256, -0.8352013826370239, 0.39543837308883667, 0.07048940658569336, 0.025856366381049156, 0.3478521406650543, -0.1651829481124878, 0.5100048780441284, 0.04173152893781662, -0.029541585594415665, -0.07493340969085693, 0.2931387722492218, -0.865304708480835, -0.07598017156124115, 0.9088851809501648, -0.9695857763290405, -0.5926740765571594, -0.8562935590744019, -0.5528805255889893, 0.06581655889749527, 0.5749228000640869, 0.4059331715106964, 0.525036633014679, 0.043931744992733, 0.45761027932167053, 0.8205142021179199, -0.16229194402694702, 0.6258786916732788, 0.2728574275970459, 0.07517705112695694, -0.638239860534668, 0.8714563250541687, 0.141998291015625, 0.3626168370246887, 0.28180959820747375, 0.4180215001106262, -0.5352848768234253, -0.24695119261741638, -0.261251837015152, 0.4952666461467743, -0.6173401474952698, -0.27500855922698975, -0.35305076837539673, -0.3840746581554413, -0.7822995781898499, -0.6454393267631531, -0.3131715655326843, -0.4808049201965332, -0.4578869640827179, -0.5175482034683228, 0.5749918222427368, 0.42891332507133484, -0.3633445203304291, 0.06661158800125122, -0.5538429617881775, 0.269659161567688, 0.34651613235473633, 0.5251463651657104, -0.37119996547698975, -0.6011459827423096, 0.0004246989556122571, -0.13181321322917938, -0.5785118341445923, -0.9547222852706909, 0.3692966401576996, -0.033234212547540665, 0.5272863507270813, 0.5740861296653748, 0.03860650956630707, 0.8796200156211853, -0.2163006216287613, 1.0658930540084839, 0.3601374328136444, -0.793414831161499, 0.7626211047172546, -0.3417401611804962, 0.19867649674415588, 0.6480584740638733, 0.20825842022895813, -0.1801263988018036, -0.7203987240791321, -1.2998358011245728, -0.8043987154960632, 0.6596293449401855, 0.3818066716194153, -0.2561050057411194, 0.03382011130452156, 0.15236054360866547, -0.29268786311149597, -0.2016773223876953, -0.7066518068313599, -0.8546401262283325, -0.17971967160701752, -0.495025634765625, 0.12770996987819672, 0.08863120526075363, -0.38538840413093567, -0.8179268836975098, 0.9543057680130005, -0.0007278380217030644, 0.5592613220214844, 0.4952737092971802, 0.06921760737895966, 0.041556622833013535, 0.44884946942329407, 0.9039116501808167, 0.725610077381134, -0.46362510323524475, 0.38310161232948303, 0.4028388261795044, -1.0526601076126099, 0.46597036719322205, 0.3188556730747223, -0.1239553913474083, -0.02921721339225769, 0.49420303106307983, 0.423536092042923, 0.020823169499635696, -0.23264354467391968, 0.6255229115486145, -0.0015738896327093244, -0.5620928406715393, -0.31844696402549744, 0.09917227178812027, -0.130318284034729, 0.007054885849356651, 0.41851961612701416, 
-0.1991732269525528, -0.030074993148446083, -0.5384535789489746, 0.458036333322525, 0.39233240485191345, -0.4581056833267212, -0.17034867405891418, 0.7311553955078125, -0.17829768359661102, -0.17567312717437744, 0.3121756911277771, -0.22131451964378357, -0.6217946410179138, 1.1706904172897339, 0.6106492280960083, 0.663084864616394, -0.28238752484321594, -0.06133190542459488, 0.9078725576400757, 0.4008442759513855, -0.045179639011621475, 0.4924084544181824, 0.3182983100414276, -0.3114117980003357, 0.16775400936603546, -0.88604336977005, -0.07365914434194565, 0.1911381483078003, -0.8184812068939209, 0.3136662542819977, -0.534663200378418, -0.1708051711320877, 0.025838321074843407, 0.4153151512145996, -0.4430566430091858, 0.5777915120124817, -0.38370293378829956, 1.21746826171875, -1.0121312141418457, 0.7191341519355774, 0.7562375068664551, -0.5731936097145081, -1.0782371759414673, -0.543160080909729, 0.0018705747788771987, -0.849284827709198, 0.5623205304145813, -0.01447381917387247, 0.16609197854995728, -0.10252374410629272, -0.7422521710395813, -0.8946625590324402, 1.4157745838165283, -0.04560244083404541, -0.4483397603034973, 0.25314152240753174, -0.08236925303936005, 0.45975199341773987, 0.13081113994121552, 0.6064948439598083, 0.7816503047943115, 0.7801823019981384, -0.06454398483037949, -0.7469803094863892, 0.33100855350494385, -0.4993579685688019, -0.3075588047504425, 0.4594770073890686, -0.9972780346870422, 1.1776282787322998, 0.009059210307896137, 0.21138334274291992, -0.18117254972457886, 0.6787951588630676, 0.8386157751083374, 0.2924834191799164, 0.3571746051311493, 0.9050208926200867, 0.8149990439414978, -0.4647262394428253, 0.9965687394142151, -0.22791284322738647, 0.8779494166374207, 0.7200352549552917, 0.20589782297611237, 0.787813127040863, 0.6669326424598694, -0.5772596597671509, 0.559474527835846, 0.8194157481193542, -0.31188642978668213, 0.38852351903915405, 0.2330320030450821, -0.1181454136967659, -0.10038458555936813, 0.39196112751960754, -0.8792248964309692, 0.14040198922157288, 0.08201739192008972, -0.34223586320877075, 0.10157639533281326, -0.4235962927341461, 0.3113163411617279, -0.10517790168523788, -0.08299322426319122, 0.3658476173877716, 0.027681048959493637, -0.45954200625419617, 0.9237420558929443, -0.17188619077205658, 0.7918328642845154, -0.543651819229126, -0.08538806438446045, -0.4074302315711975, 0.5658043026924133, -0.4587308168411255, -1.0748509168624878, 0.20127227902412415, 0.04687173664569855, -0.10975813865661621, -0.15975254774093628, 0.7058953642845154, -0.20530572533607483, -0.7911100387573242, 0.15361616015434265, 0.05629756674170494, 0.09677083790302277, 0.5534359216690063, -0.7168887257575989, -0.30565783381462097, -0.05194288119673729, -0.577872633934021, 0.14254866540431976, 0.2837746739387512, 0.27193552255630493, 0.5464908480644226, 0.655396580696106, 0.18874633312225342, 0.40769273042678833, -0.5437703132629395, 0.790193498134613, -1.0439871549606323, -0.7251049876213074, -0.9313704967498779, 0.423825204372406, -0.31487205624580383, -0.8569938540458679, 0.9790440797805786, 1.036893367767334, 0.8828344941139221, 0.018733514472842216, 0.6225661039352417, -0.376675546169281, 0.2713627815246582, -0.3824597895145416, 0.9274168610572815, -0.8506884574890137, -0.2211749255657196, -0.26491060853004456, -0.7116405367851257, -0.3973310589790344, 0.8726658225059509, -0.13877888023853302, 0.007854962721467018, 1.0477687120437622, 0.712054431438446, -0.09452155232429504, 0.05887060612440109, -0.06833571195602417, 0.5815763473510742, 
0.40614351630210876, 0.9852750897407532, 0.6164450645446777, -0.7949754595756531, 0.3605252206325531, -0.5116027593612671, -0.4458532929420471, -0.3727879226207733, -0.4719693064689636, -0.7907747030258179, -0.43653061985969543, -0.22859777510166168, -0.5945286154747009, -0.11493303626775742, 1.0012741088867188, 0.47552528977394104, -0.9538512825965881, -0.4130028188228607, -0.14299309253692627, 0.12123077362775803, -0.5897272229194641, -0.41770920157432556, 0.7386965155601501, -0.10471685230731964, -0.5664253234863281, 0.1889890432357788, -0.08700607717037201, 0.2393450140953064, 0.09273555874824524, -0.3931582570075989, -0.7393785119056702, 0.041176147758960724, 0.4547973573207855, 0.38335850834846497, -0.6701880097389221, -0.7281239628791809, 0.29600828886032104, -0.5044350028038025, 0.42129844427108765, -0.03903843089938164, -0.5117306113243103, 0.059062566608190536, 0.664801299571991, 0.5054982900619507, 0.6591076850891113, -0.05228043347597122, 0.07097402215003967, -0.6445116996765137, 0.20272235572338104, -0.0068782661110162735, 0.316636323928833, -0.024079421535134315, -0.3102108836174011, 0.7788428664207458, 0.675963819026947, -0.5450901389122009, -1.0691401958465576, -0.42711836099624634, -1.433038592338562, -0.03922043368220329, 1.1148877143859863, -0.031725503504276276, -0.49351850152015686, 0.23197390139102936, -0.13020259141921997, 0.21573378145694733, -0.3256664574146271, 0.7530571818351746, 0.793433666229248, -0.3960719108581543, 0.09647098183631897, -0.6294829845428467, 0.3572355806827545, 0.5563302040100098, -1.1971572637557983, -0.10808686167001724, 0.23691987991333008, 0.2868340313434601, 0.3887383043766022, 0.6633983254432678, -0.10150247812271118, 0.28608793020248413, 0.20418165624141693, 0.04741447791457176, 0.022853244096040726, 0.06342897564172745, -0.21202988922595978, 0.08808379620313644, -0.27076077461242676, -0.4759021997451782 ]
open-llm-leaderboard/details_TheBloke__wizardLM-7B-HF
open-llm-leaderboard
2023-08-27T12:33:27Z
201
0
[ "region:us" ]
null
2023-08-18T11:25:07Z
--- pretty_name: Evaluation run of TheBloke/wizardLM-7B-HF dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/wizardLM-7B-HF](https://huggingface.co/TheBloke/wizardLM-7B-HF) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__wizardLM-7B-HF\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-18T11:33:18.439367](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__wizardLM-7B-HF/blob/main/results_2023-07-18T11%3A33%3A18.439367.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.38566819917906325,\n\ \ \"acc_stderr\": 0.03482242619787474,\n \"acc_norm\": 0.3891088361419288,\n\ \ \"acc_norm_stderr\": 0.03481173503822327,\n \"mc1\": 0.31456548347613217,\n\ \ \"mc1_stderr\": 0.01625524199317919,\n \"mc2\": 0.45584096136441793,\n\ \ \"mc2_stderr\": 0.016028055350830416\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.48464163822525597,\n \"acc_stderr\": 0.014604496129394913,\n\ \ \"acc_norm\": 0.5034129692832765,\n \"acc_norm_stderr\": 0.014611050403244081\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5685122485560645,\n\ \ \"acc_stderr\": 0.004942716091996078,\n \"acc_norm\": 0.7527384983071101,\n\ \ \"acc_norm_stderr\": 0.004305383398710189\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n\ \ \"acc_stderr\": 0.042849586397534,\n \"acc_norm\": 0.43703703703703706,\n\ \ \"acc_norm_stderr\": 0.042849586397534\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.40131578947368424,\n \"acc_stderr\": 0.03988903703336284,\n\ \ \"acc_norm\": 0.40131578947368424,\n \"acc_norm_stderr\": 0.03988903703336284\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.48,\n\ \ \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \ \ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.4377358490566038,\n \"acc_stderr\": 0.03053333843046751,\n\ \ \"acc_norm\": 0.4377358490566038,\n \"acc_norm_stderr\": 0.03053333843046751\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3680555555555556,\n\ \ \"acc_stderr\": 0.04032999053960719,\n \"acc_norm\": 0.3680555555555556,\n\ \ \"acc_norm_stderr\": 0.04032999053960719\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.35260115606936415,\n\ \ \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.35260115606936415,\n\ \ \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n\ \ \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.03202563076101735,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.03202563076101735\n },\n\ \ \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.04142439719489362,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.04142439719489362\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.32413793103448274,\n \"acc_stderr\": 0.03900432069185555,\n\ \ \"acc_norm\": 0.32413793103448274,\n \"acc_norm_stderr\": 0.03900432069185555\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.30687830687830686,\n \"acc_stderr\": 0.02375292871211214,\n \"\ acc_norm\": 0.30687830687830686,\n \"acc_norm_stderr\": 0.02375292871211214\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n\ \ \"acc_stderr\": 0.03893259610604675,\n \"acc_norm\": 0.25396825396825395,\n\ \ \"acc_norm_stderr\": 0.03893259610604675\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.36129032258064514,\n \"acc_stderr\": 0.02732754844795754,\n \"\ acc_norm\": 0.36129032258064514,\n \"acc_norm_stderr\": 0.02732754844795754\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.30049261083743845,\n \"acc_stderr\": 0.03225799476233484,\n \"\ acc_norm\": 0.30049261083743845,\n \"acc_norm_stderr\": 0.03225799476233484\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\"\ : 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.45454545454545453,\n \"acc_stderr\": 0.03888176921674099,\n\ \ \"acc_norm\": 0.45454545454545453,\n \"acc_norm_stderr\": 0.03888176921674099\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.42424242424242425,\n \"acc_stderr\": 0.03521224908841583,\n \"\ acc_norm\": 0.42424242424242425,\n \"acc_norm_stderr\": 0.03521224908841583\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 
0.46632124352331605,\n \"acc_stderr\": 0.03600244069867178,\n\ \ \"acc_norm\": 0.46632124352331605,\n \"acc_norm_stderr\": 0.03600244069867178\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.35384615384615387,\n \"acc_stderr\": 0.024243783994062164,\n\ \ \"acc_norm\": 0.35384615384615387,\n \"acc_norm_stderr\": 0.024243783994062164\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.24444444444444444,\n \"acc_stderr\": 0.026202766534652148,\n \ \ \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.026202766534652148\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.3277310924369748,\n \"acc_stderr\": 0.030489911417673227,\n\ \ \"acc_norm\": 0.3277310924369748,\n \"acc_norm_stderr\": 0.030489911417673227\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"\ acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.46605504587155966,\n \"acc_stderr\": 0.021387863350353992,\n \"\ acc_norm\": 0.46605504587155966,\n \"acc_norm_stderr\": 0.021387863350353992\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.2175925925925926,\n \"acc_stderr\": 0.028139689444859672,\n \"\ acc_norm\": 0.2175925925925926,\n \"acc_norm_stderr\": 0.028139689444859672\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.45588235294117646,\n \"acc_stderr\": 0.03495624522015474,\n \"\ acc_norm\": 0.45588235294117646,\n \"acc_norm_stderr\": 0.03495624522015474\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.43037974683544306,\n \"acc_stderr\": 0.03223017195937597,\n \ \ \"acc_norm\": 0.43037974683544306,\n \"acc_norm_stderr\": 0.03223017195937597\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5112107623318386,\n\ \ \"acc_stderr\": 0.033549366530984746,\n \"acc_norm\": 0.5112107623318386,\n\ \ \"acc_norm_stderr\": 0.033549366530984746\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.3893129770992366,\n \"acc_stderr\": 0.04276486542814591,\n\ \ \"acc_norm\": 0.3893129770992366,\n \"acc_norm_stderr\": 0.04276486542814591\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5785123966942148,\n \"acc_stderr\": 0.04507732278775087,\n \"\ acc_norm\": 0.5785123966942148,\n \"acc_norm_stderr\": 0.04507732278775087\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4537037037037037,\n\ \ \"acc_stderr\": 0.04812917324536821,\n \"acc_norm\": 0.4537037037037037,\n\ \ \"acc_norm_stderr\": 0.04812917324536821\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.3987730061349693,\n \"acc_stderr\": 0.038470214204560246,\n\ \ \"acc_norm\": 0.3987730061349693,\n \"acc_norm_stderr\": 0.038470214204560246\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\ \ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\ \ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.3592233009708738,\n \"acc_stderr\": 0.047504583990416946,\n\ \ \"acc_norm\": 0.3592233009708738,\n \"acc_norm_stderr\": 0.047504583990416946\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5170940170940171,\n\ \ \"acc_stderr\": 0.032736940493481824,\n \"acc_norm\": 0.5170940170940171,\n\ \ \"acc_norm_stderr\": 
0.032736940493481824\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \ \ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.545338441890166,\n\ \ \"acc_stderr\": 0.017806304585052602,\n \"acc_norm\": 0.545338441890166,\n\ \ \"acc_norm_stderr\": 0.017806304585052602\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.38439306358381503,\n \"acc_stderr\": 0.026189666966272035,\n\ \ \"acc_norm\": 0.38439306358381503,\n \"acc_norm_stderr\": 0.026189666966272035\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23016759776536314,\n\ \ \"acc_stderr\": 0.014078339253425819,\n \"acc_norm\": 0.23016759776536314,\n\ \ \"acc_norm_stderr\": 0.014078339253425819\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.4084967320261438,\n \"acc_stderr\": 0.028146405993096358,\n\ \ \"acc_norm\": 0.4084967320261438,\n \"acc_norm_stderr\": 0.028146405993096358\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3858520900321543,\n\ \ \"acc_stderr\": 0.027648149599751457,\n \"acc_norm\": 0.3858520900321543,\n\ \ \"acc_norm_stderr\": 0.027648149599751457\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.39814814814814814,\n \"acc_stderr\": 0.027237415094592477,\n\ \ \"acc_norm\": 0.39814814814814814,\n \"acc_norm_stderr\": 0.027237415094592477\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3191489361702128,\n \"acc_stderr\": 0.027807990141320193,\n \ \ \"acc_norm\": 0.3191489361702128,\n \"acc_norm_stderr\": 0.027807990141320193\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3220338983050847,\n\ \ \"acc_stderr\": 0.01193393607189109,\n \"acc_norm\": 0.3220338983050847,\n\ \ \"acc_norm_stderr\": 0.01193393607189109\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.3860294117647059,\n \"acc_stderr\": 0.029573269134411124,\n\ \ \"acc_norm\": 0.3860294117647059,\n \"acc_norm_stderr\": 0.029573269134411124\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.40032679738562094,\n \"acc_stderr\": 0.019821843688271765,\n \ \ \"acc_norm\": 0.40032679738562094,\n \"acc_norm_stderr\": 0.019821843688271765\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.41818181818181815,\n\ \ \"acc_stderr\": 0.04724577405731571,\n \"acc_norm\": 0.41818181818181815,\n\ \ \"acc_norm_stderr\": 0.04724577405731571\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.3142857142857143,\n \"acc_stderr\": 0.029719329422417482,\n\ \ \"acc_norm\": 0.3142857142857143,\n \"acc_norm_stderr\": 0.029719329422417482\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.47761194029850745,\n\ \ \"acc_stderr\": 0.035319879302087305,\n \"acc_norm\": 0.47761194029850745,\n\ \ \"acc_norm_stderr\": 0.035319879302087305\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\"\ : 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.39156626506024095,\n \"acc_stderr\": 0.03799857454479637,\n\ \ \"acc_norm\": 0.39156626506024095,\n \"acc_norm_stderr\": 0.03799857454479637\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.5380116959064327,\n\ \ \"acc_stderr\": 0.038237270928823064,\n \"acc_norm\": 0.5380116959064327,\n\ \ \"acc_norm_stderr\": 0.038237270928823064\n },\n 
\"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.31456548347613217,\n \"mc1_stderr\": 0.01625524199317919,\n\ \ \"mc2\": 0.45584096136441793,\n \"mc2_stderr\": 0.016028055350830416\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/wizardLM-7B-HF leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|arc:challenge|25_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hellaswag|10_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T11:33:18.439367.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T11:33:18.439367.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T11:33:18.439367.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-18T11:33:18.439367.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-management|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T11:33:18.439367.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T11:33:18.439367.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_18T11_33_18.439367 path: - '**/details_harness|truthfulqa:mc|0_2023-07-18T11:33:18.439367.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-18T11:33:18.439367.parquet' - config_name: results data_files: - split: 2023_07_18T11_33_18.439367 path: - results_2023-07-18T11:33:18.439367.parquet - split: latest path: - results_2023-07-18T11:33:18.439367.parquet --- # Dataset Card for Evaluation run of TheBloke/wizardLM-7B-HF ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/wizardLM-7B-HF - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/wizardLM-7B-HF](https://huggingface.co/TheBloke/wizardLM-7B-HF) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
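The paragraph above describes the configuration and split layout only in prose, so here is a small, hedged sketch of how that layout might be inspected programmatically. It is not part of the auto-generated card: it assumes only the standard `datasets` inspection helpers `get_dataset_config_names` and `get_dataset_split_names`, and the `harness_truthfulqa_mc_0` configuration name is borrowed from the configuration list above purely as an example.

```python
from datasets import get_dataset_config_names, get_dataset_split_names

repo = "open-llm-leaderboard/details_TheBloke__wizardLM-7B-HF"

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo)
print(len(configs), "configurations found")

# Each evaluation run appears as a timestamp-named split; "latest" points to the newest run.
print(get_dataset_split_names(repo, "harness_truthfulqa_mc_0"))
```

The split names returned for a configuration should match the configuration section above: one timestamped split per run, plus `latest`.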
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__wizardLM-7B-HF", "harness_truthfulqa_mc_0", split="train") ``` A fuller loading sketch appears at the end of this card. ## Latest results These are the [latest results from run 2023-07-18T11:33:18.439367](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__wizardLM-7B-HF/blob/main/results_2023-07-18T11%3A33%3A18.439367.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.38566819917906325, "acc_stderr": 0.03482242619787474, "acc_norm": 0.3891088361419288, "acc_norm_stderr": 0.03481173503822327, "mc1": 0.31456548347613217, "mc1_stderr": 0.01625524199317919, "mc2": 0.45584096136441793, "mc2_stderr": 0.016028055350830416 }, "harness|arc:challenge|25": { "acc": 0.48464163822525597, "acc_stderr": 0.014604496129394913, "acc_norm": 0.5034129692832765, "acc_norm_stderr": 0.014611050403244081 }, "harness|hellaswag|10": { "acc": 0.5685122485560645, "acc_stderr": 0.004942716091996078, "acc_norm": 0.7527384983071101, "acc_norm_stderr": 0.004305383398710189 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.042849586397534, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.042849586397534 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.03988903703336284, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.03988903703336284 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4377358490566038, "acc_stderr": 0.03053333843046751, "acc_norm": 0.4377358490566038, "acc_norm_stderr": 0.03053333843046751 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3680555555555556, "acc_stderr": 0.04032999053960719, "acc_norm": 0.3680555555555556, "acc_norm_stderr": 0.04032999053960719 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.35260115606936415, "acc_stderr": 0.036430371689585475, "acc_norm": 0.35260115606936415, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4, "acc_stderr": 0.03202563076101735, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101735 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm":
0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.32413793103448274, "acc_stderr": 0.03900432069185555, "acc_norm": 0.32413793103448274, "acc_norm_stderr": 0.03900432069185555 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30687830687830686, "acc_stderr": 0.02375292871211214, "acc_norm": 0.30687830687830686, "acc_norm_stderr": 0.02375292871211214 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.03893259610604675, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604675 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.36129032258064514, "acc_stderr": 0.02732754844795754, "acc_norm": 0.36129032258064514, "acc_norm_stderr": 0.02732754844795754 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.30049261083743845, "acc_stderr": 0.03225799476233484, "acc_norm": 0.30049261083743845, "acc_norm_stderr": 0.03225799476233484 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.45454545454545453, "acc_stderr": 0.03888176921674099, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.03888176921674099 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.42424242424242425, "acc_stderr": 0.03521224908841583, "acc_norm": 0.42424242424242425, "acc_norm_stderr": 0.03521224908841583 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.46632124352331605, "acc_stderr": 0.03600244069867178, "acc_norm": 0.46632124352331605, "acc_norm_stderr": 0.03600244069867178 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.35384615384615387, "acc_stderr": 0.024243783994062164, "acc_norm": 0.35384615384615387, "acc_norm_stderr": 0.024243783994062164 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.026202766534652148, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.026202766534652148 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3277310924369748, "acc_stderr": 0.030489911417673227, "acc_norm": 0.3277310924369748, "acc_norm_stderr": 0.030489911417673227 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.46605504587155966, "acc_stderr": 0.021387863350353992, "acc_norm": 0.46605504587155966, "acc_norm_stderr": 0.021387863350353992 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2175925925925926, "acc_stderr": 0.028139689444859672, "acc_norm": 0.2175925925925926, "acc_norm_stderr": 0.028139689444859672 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.45588235294117646, "acc_stderr": 0.03495624522015474, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.03495624522015474 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.43037974683544306, "acc_stderr": 0.03223017195937597, "acc_norm": 0.43037974683544306, "acc_norm_stderr": 0.03223017195937597 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5112107623318386, "acc_stderr": 0.033549366530984746, "acc_norm": 
0.5112107623318386, "acc_norm_stderr": 0.033549366530984746 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.3893129770992366, "acc_stderr": 0.04276486542814591, "acc_norm": 0.3893129770992366, "acc_norm_stderr": 0.04276486542814591 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5785123966942148, "acc_stderr": 0.04507732278775087, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.04507732278775087 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4537037037037037, "acc_stderr": 0.04812917324536821, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.04812917324536821 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3987730061349693, "acc_stderr": 0.038470214204560246, "acc_norm": 0.3987730061349693, "acc_norm_stderr": 0.038470214204560246 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.3592233009708738, "acc_stderr": 0.047504583990416946, "acc_norm": 0.3592233009708738, "acc_norm_stderr": 0.047504583990416946 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5170940170940171, "acc_stderr": 0.032736940493481824, "acc_norm": 0.5170940170940171, "acc_norm_stderr": 0.032736940493481824 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.545338441890166, "acc_stderr": 0.017806304585052602, "acc_norm": 0.545338441890166, "acc_norm_stderr": 0.017806304585052602 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.38439306358381503, "acc_stderr": 0.026189666966272035, "acc_norm": 0.38439306358381503, "acc_norm_stderr": 0.026189666966272035 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23016759776536314, "acc_stderr": 0.014078339253425819, "acc_norm": 0.23016759776536314, "acc_norm_stderr": 0.014078339253425819 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4084967320261438, "acc_stderr": 0.028146405993096358, "acc_norm": 0.4084967320261438, "acc_norm_stderr": 0.028146405993096358 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3858520900321543, "acc_stderr": 0.027648149599751457, "acc_norm": 0.3858520900321543, "acc_norm_stderr": 0.027648149599751457 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.39814814814814814, "acc_stderr": 0.027237415094592477, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.027237415094592477 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3191489361702128, "acc_stderr": 0.027807990141320193, "acc_norm": 0.3191489361702128, "acc_norm_stderr": 0.027807990141320193 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3220338983050847, "acc_stderr": 0.01193393607189109, "acc_norm": 0.3220338983050847, "acc_norm_stderr": 0.01193393607189109 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3860294117647059, "acc_stderr": 0.029573269134411124, "acc_norm": 0.3860294117647059, "acc_norm_stderr": 0.029573269134411124 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.40032679738562094, "acc_stderr": 0.019821843688271765, "acc_norm": 0.40032679738562094, "acc_norm_stderr": 0.019821843688271765 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.04724577405731571, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.04724577405731571 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3142857142857143, 
"acc_stderr": 0.029719329422417482, "acc_norm": 0.3142857142857143, "acc_norm_stderr": 0.029719329422417482 }, "harness|hendrycksTest-sociology|5": { "acc": 0.47761194029850745, "acc_stderr": 0.035319879302087305, "acc_norm": 0.47761194029850745, "acc_norm_stderr": 0.035319879302087305 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479637, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479637 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.5380116959064327, "acc_stderr": 0.038237270928823064, "acc_norm": 0.5380116959064327, "acc_norm_stderr": 0.038237270928823064 }, "harness|truthfulqa:mc|0": { "mc1": 0.31456548347613217, "mc1_stderr": 0.01625524199317919, "mc2": 0.45584096136441793, "mc2_stderr": 0.016028055350830416 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7412386536598206, -0.8451684713363647, 0.23718196153640747, 0.2281021922826767, -0.12525472044944763, -0.04453934356570244, 0.035789284855127335, -0.2112087607383728, 0.5444607734680176, -0.03812447562813759, -0.4951113164424896, -0.6845880746841431, -0.41259703040122986, 0.258756548166275, -0.03738831728696823, 0.8163996338844299, -0.1856212615966797, -0.12739910185337067, 0.09599094837903976, -0.023580290377140045, -0.2854141891002655, -0.3483699858188629, -0.45877471566200256, -0.37935876846313477, 0.21321901679039001, 0.4094218611717224, 0.4419425427913666, 0.7691401839256287, 0.6623591780662537, 0.2937638759613037, -0.3112097978591919, 0.011752968654036522, -0.19946788251399994, -0.2941954731941223, 0.36403214931488037, -0.30128002166748047, -0.8549708724021912, 0.30104511976242065, 0.7474606037139893, 0.6586303114891052, -0.10724237561225891, 0.2884155511856079, 0.03187400847673416, 0.5994716882705688, -0.38917022943496704, 0.04005502164363861, -0.290482759475708, 0.2376500517129898, -0.18049634993076324, -0.292175829410553, -0.32324036955833435, -0.231443852186203, -0.11335722357034683, -0.8874165415763855, 0.3041418194770813, 0.3763263523578644, 1.5627968311309814, -0.17937280237674713, -0.2470695823431015, 0.11562962830066681, -0.100326769053936, 0.9873337149620056, -0.8791664242744446, 0.38214579224586487, 0.8031424283981323, 0.13580062985420227, -0.20718255639076233, -0.5553432703018188, -0.6392638683319092, 0.05907217413187027, -0.37638577818870544, 0.3470320999622345, -0.029439061880111694, -0.16379176080226898, 0.37168949842453003, 0.6679123640060425, -0.6905158758163452, 0.15140430629253387, -0.686161994934082, -0.21758557856082916, 1.083012342453003, 0.35946351289749146, 0.04909971356391907, -0.34789592027664185, -0.671917736530304, -0.6332131624221802, -0.4202052056789398, 0.2332570105791092, 0.4261946380138397, 0.34672829508781433, -0.3718414902687073, 0.7188560962677002, -0.41772642731666565, 0.6066851615905762, 0.4427218735218048, 0.006494731642305851, 0.8970290422439575, -0.671573281288147, -0.5517393946647644, -0.0277815293520689, 1.084822654724121, 0.5884415507316589, 0.06531675904989243, 0.19863861799240112, 0.016301918774843216, -0.14598391950130463, 0.04778604581952095, -0.8585749268531799, -0.30698370933532715, 0.16272027790546417, -0.3867771327495575, -0.4696497917175293, 0.3254142105579376, -0.8761470913887024, 0.09505251795053482, -0.04371946305036545, 0.4195646345615387, -0.5084505677223206, -0.1283494383096695, 0.2450399249792099, -0.45363709330558777, 0.8383366465568542, -0.14441898465156555, -0.7893185019493103, 0.36763444542884827, 0.49843287467956543, 0.7563958764076233, -0.15241189301013947, -0.4749995172023773, -0.11041262745857239, -0.07748136669397354, -0.32659584283828735, 0.52027428150177, -0.2506655156612396, -0.4419108033180237, -0.2652227282524109, 0.3184099793434143, -0.28057974576950073, -0.36833998560905457, 0.7098530530929565, -0.24443931877613068, 0.2163274735212326, -0.43477290868759155, -0.6791768074035645, 0.13974490761756897, 0.3945070505142212, -0.4357996881008148, 1.2865030765533447, 0.26579445600509644, -0.8191532492637634, 0.42922577261924744, -0.589810848236084, -0.1583399921655655, -0.03844958543777466, 0.005754728335887194, -0.7741491198539734, -0.27237793803215027, 0.13055090606212616, 0.3843356668949127, -0.12149964272975922, -0.15616197884082794, -0.4000759720802307, -0.3582767844200134, 0.33786138892173767, -0.2049911916255951, 1.2264035940170288, -0.044922806322574615, -0.7441642880439758, -0.1272314041852951, 
-1.2749136686325073, 0.29619041085243225, 0.19922994077205658, -0.3843512237071991, -0.18283164501190186, -0.4988337457180023, -0.0267143864184618, 0.1887545883655548, 0.28519073128700256, -0.785302460193634, 0.2531423568725586, -0.37544459104537964, 0.06457918137311935, 1.2590179443359375, 0.042266108095645905, 0.16634193062782288, -0.5378521680831909, 0.5006794333457947, 0.19521628320217133, 0.19960206747055054, 0.4137432277202606, -0.5977319478988647, -0.7931774854660034, -0.4821073114871979, -0.054665274918079376, 0.6231312155723572, -0.17050357162952423, 1.1820322275161743, 0.09998355805873871, -0.9039890170097351, -0.4427361786365509, -0.11534906178712845, 0.48850303888320923, 0.8306117653846741, 0.6181433796882629, -0.03657674416899681, -0.5790847539901733, -1.112028956413269, -0.31089720129966736, -0.1597282886505127, 0.14377139508724213, 0.17583517730236053, 1.012686848640442, -0.22671540081501007, 0.6202346086502075, -1.0342113971710205, -0.18259982764720917, 0.18588131666183472, -0.07462014257907867, 0.779839038848877, 0.7341006994247437, 0.5556036233901978, -0.6635493636131287, -0.5542020201683044, 0.21889320015907288, -0.869779109954834, -0.10541829466819763, 0.14057551324367523, -0.3612784445285797, 0.1500786393880844, 0.10490382462739944, -0.7374818921089172, 0.5842177867889404, 0.19639952480793, -1.1270045042037964, 1.1005353927612305, -0.35755014419555664, 0.5937822461128235, -0.9479098916053772, 0.15515519678592682, -0.08409412205219269, 0.052997466176748276, -0.5127480626106262, -0.035305608063936234, 0.11648201942443848, 0.46936723589897156, -0.5112748146057129, 0.8376340270042419, -0.7207787036895752, -0.06566458195447922, 0.42040544748306274, 0.12508784234523773, -0.08419421315193176, 0.37339749932289124, -0.23630723357200623, 0.7789302468299866, 0.7679183483123779, -0.46704596281051636, 0.55324387550354, 0.41812828183174133, -0.2045261561870575, 0.7399208545684814, -0.45838847756385803, -0.3385942578315735, 0.30296167731285095, -0.060393258929252625, -0.792871356010437, -0.47795918583869934, 0.04228731244802475, -0.5885711312294006, -0.0729641318321228, 0.4105170965194702, -0.2797508239746094, -0.8368440270423889, -0.9260970950126648, 0.285627543926239, 0.7432007193565369, -0.41746965050697327, -0.17680290341377258, 0.03319166973233223, 0.12893539667129517, -0.8387041091918945, -0.7989532351493835, -0.5024268627166748, -0.2135412096977234, -0.6936389207839966, 0.3141617178916931, -0.2636241614818573, -0.2832675874233246, -0.09601597487926483, -0.27135685086250305, -0.3230268359184265, -0.014591122046113014, 0.11878219991922379, 0.6968472003936768, -0.3807529807090759, -0.29146796464920044, -0.2459953874349594, -0.14380638301372528, 0.232997864484787, -0.08487505465745926, 0.3600340783596039, -0.4428838789463043, -0.4320499300956726, -0.38801151514053345, -0.008229726925492287, 0.7058549523353577, -0.08538369089365005, 0.7694951891899109, 0.4455242455005646, -0.29890093207359314, -0.043084558099508286, -0.3174661695957184, -0.27563661336898804, -0.5865364074707031, 0.27757200598716736, -0.4800720810890198, -1.013963222503662, 0.7816192507743835, 0.5451072454452515, 0.07996664941310883, 1.137570858001709, 0.6497021913528442, -0.2949812710285187, 1.0173296928405762, 0.019503578543663025, 0.4029647409915924, 0.38810694217681885, -0.6554970741271973, 0.11934899538755417, -0.901483416557312, -0.32612332701683044, -0.5640398859977722, -0.4394899308681488, -0.6839517951011658, -0.06932870298624039, 0.28030410408973694, 0.19376300275325775, -0.7250788807868958, 
0.568886935710907, -0.842093825340271, 0.5749025344848633, 0.5619361400604248, 0.2496076375246048, 0.1691526472568512, -0.1479591280221939, -0.40303274989128113, -0.07956282794475555, -0.4653421640396118, -0.24593639373779297, 1.2009168863296509, 0.24232858419418335, 0.7309399247169495, 0.06847000122070312, 0.877861738204956, 0.12489724159240723, -0.059770915657281876, -0.5608501434326172, 0.6418536901473999, 0.163410946726799, -0.8424088954925537, -0.461906373500824, -0.4712691009044647, -1.1088366508483887, 0.4481857120990753, -0.13273318111896515, -0.858676016330719, 0.13688784837722778, 0.05886617302894592, -0.1628916710615158, 0.4828471541404724, -0.5812314748764038, 0.8572161793708801, -0.11144787818193436, -0.5030666589736938, 0.10005636513233185, -0.8281200528144836, 0.4548676609992981, 0.1825011521577835, 0.2767817974090576, -0.001838327501900494, 0.25932374596595764, 1.1773228645324707, -0.8553091287612915, 0.41916579008102417, 0.08329018205404282, 0.02805476076900959, 0.3356330096721649, -0.16748754680156708, 0.5169031023979187, 0.024819232523441315, -0.024353088811039925, -0.12981025874614716, 0.3293249011039734, -0.8840373158454895, -0.06965547800064087, 0.9054012298583984, -0.9466720223426819, -0.6263132691383362, -0.9034383893013, -0.5411280393600464, 0.03197714313864708, 0.5926848649978638, 0.3680031895637512, 0.5411237478256226, 0.011385861784219742, 0.42279717326164246, 0.8503329157829285, -0.12357331812381744, 0.6257652044296265, 0.24835166335105896, 0.07252508401870728, -0.6441657543182373, 0.8661037087440491, 0.1026483029127121, 0.37917545437812805, 0.23793217539787292, 0.4009135365486145, -0.5437244772911072, -0.19734178483486176, -0.22558091580867767, 0.5037788152694702, -0.6601345539093018, -0.24802395701408386, -0.36155813932418823, -0.38512328267097473, -0.7783098816871643, -0.6589243412017822, -0.30741623044013977, -0.4911346733570099, -0.4738394618034363, -0.4871828854084015, 0.6073780059814453, 0.4738475978374481, -0.38493406772613525, 0.01615958660840988, -0.5134348273277283, 0.25283581018447876, 0.33590665459632874, 0.5352325439453125, -0.35655713081359863, -0.5640491843223572, 0.012144718319177628, -0.1616109311580658, -0.5861421823501587, -1.0047491788864136, 0.37446171045303345, -0.06137486919760704, 0.5622866153717041, 0.5525270700454712, 0.09465742856264114, 0.8355823159217834, -0.26062288880348206, 1.0633970499038696, 0.3708497881889343, -0.7543450593948364, 0.6853204965591431, -0.3242841958999634, 0.1900598257780075, 0.6332083344459534, 0.17144420742988586, -0.1709892451763153, -0.7020208239555359, -1.2841970920562744, -0.7876970171928406, 0.6205126643180847, 0.39812445640563965, -0.27804630994796753, 0.051851022988557816, 0.10733859986066818, -0.2767075002193451, -0.15470068156719208, -0.6414920091629028, -0.9095107913017273, -0.1857563555240631, -0.4997096061706543, 0.12447059899568558, 0.07937005162239075, -0.4184991121292114, -0.869934618473053, 0.9315977692604065, 0.018217066302895546, 0.5865901112556458, 0.46974122524261475, 0.04599885269999504, 0.0934314876794815, 0.4862724840641022, 0.9172911047935486, 0.8030439615249634, -0.44453683495521545, 0.41406679153442383, 0.43540170788764954, -1.0534571409225464, 0.48287829756736755, 0.3183601200580597, -0.053805623203516006, -0.013950121589004993, 0.4898827075958252, 0.44646430015563965, 0.027026943862438202, -0.22118419408798218, 0.6266068816184998, 0.03557105362415314, -0.5532748103141785, -0.378398060798645, 0.10946139693260193, -0.11093384772539139, -0.0166018083691597, 0.42476990818977356, 
-0.1743224710226059, -0.004547612275928259, -0.4654770493507385, 0.4684962332248688, 0.3756982684135437, -0.477949857711792, -0.14544059336185455, 0.6780030727386475, -0.19156430661678314, -0.18199756741523743, 0.3114449977874756, -0.19423715770244598, -0.6246784925460815, 1.1275622844696045, 0.6147524118423462, 0.6916787028312683, -0.24062354862689972, -0.06882525235414505, 0.8961609601974487, 0.371553897857666, -0.03738684952259064, 0.47814103960990906, 0.31997495889663696, -0.23671568930149078, 0.18207818269729614, -0.8935304880142212, -0.0391412153840065, 0.1112266257405281, -0.8087560534477234, 0.3379296362400055, -0.4710429906845093, -0.18551631271839142, 0.008479872718453407, 0.38704320788383484, -0.47725042700767517, 0.5376864671707153, -0.41053497791290283, 1.2405019998550415, -0.9458040595054626, 0.7094841003417969, 0.7108216285705566, -0.516042172908783, -1.0203678607940674, -0.5401120185852051, 0.011425107717514038, -0.7826906442642212, 0.5718808770179749, -0.05263284221291542, 0.1780770868062973, -0.08600462228059769, -0.700385570526123, -0.9054587483406067, 1.3926446437835693, -0.0707620158791542, -0.4342283606529236, 0.18892616033554077, -0.08562104403972626, 0.43923941254615784, 0.16500374674797058, 0.5713703036308289, 0.7864697575569153, 0.8439859747886658, -0.07503902167081833, -0.7848326563835144, 0.3203636109828949, -0.48581063747406006, -0.34677010774612427, 0.4392895996570587, -0.8932102918624878, 1.1859221458435059, 0.016596879810094833, 0.24128076434135437, -0.21047784388065338, 0.6508815288543701, 0.8300083875656128, 0.30091482400894165, 0.36252254247665405, 0.8636126518249512, 0.8551954627037048, -0.4722987115383148, 1.0284054279327393, -0.2087985873222351, 0.8761522769927979, 0.6900304555892944, 0.21705588698387146, 0.7569814920425415, 0.6784251928329468, -0.572984516620636, 0.537235677242279, 0.8383665680885315, -0.3170044720172882, 0.40915578603744507, 0.299389511346817, -0.1380729377269745, -0.14558881521224976, 0.4549623429775238, -0.8795199990272522, 0.06671358644962311, 0.06693167239427567, -0.30976587533950806, 0.060703981667757034, -0.40691784024238586, 0.30028894543647766, -0.1161293163895607, -0.07956969738006592, 0.36418357491493225, 0.03836853802204132, -0.4443587362766266, 0.9759401082992554, -0.20800623297691345, 0.7610995173454285, -0.528605043888092, -0.10025028139352798, -0.36754849553108215, 0.5805026888847351, -0.431044340133667, -1.0504480600357056, 0.16055379807949066, 0.03562876954674721, -0.14615009725093842, -0.14138083159923553, 0.7137967944145203, -0.22836355865001678, -0.7854223847389221, 0.14597313106060028, 0.08151688426733017, 0.07753812521696091, 0.5272944569587708, -0.6787020564079285, -0.34330686926841736, -0.0912393108010292, -0.5974440574645996, 0.12283745408058167, 0.3114929795265198, 0.2661956548690796, 0.5319525003433228, 0.6196199655532837, 0.17364661395549774, 0.47503283619880676, -0.5511091351509094, 0.7566537857055664, -1.0449756383895874, -0.705152690410614, -0.9167394042015076, 0.45443251729011536, -0.3801233768463135, -0.8572619557380676, 1.0285799503326416, 1.0254000425338745, 0.8593425154685974, -0.03384336084127426, 0.6337984800338745, -0.3640896677970886, 0.26689061522483826, -0.36370140314102173, 0.9578444361686707, -0.8580945730209351, -0.25608596205711365, -0.25661346316337585, -0.7136327624320984, -0.3596976697444916, 0.8186485171318054, -0.1827118843793869, 0.027910912409424782, 1.0832465887069702, 0.6980710029602051, -0.10332462936639786, 0.023900937288999557, -0.011793144978582859, 0.5764318108558655, 
0.37951159477233887, 0.9322608709335327, 0.6351823806762695, -0.7373349666595459, 0.33622506260871887, -0.49522238969802856, -0.40979769825935364, -0.40558093786239624, -0.44957298040390015, -0.8946505784988403, -0.48819977045059204, -0.21376341581344604, -0.6662131547927856, -0.13642536103725433, 1.0193734169006348, 0.45899778604507446, -0.9427167773246765, -0.41821178793907166, -0.09542214870452881, 0.19137856364250183, -0.5871602892875671, -0.41809237003326416, 0.7430902719497681, -0.13831526041030884, -0.5388177037239075, 0.1876412332057953, -0.1145697757601738, 0.22680194675922394, 0.06225882098078728, -0.4362207353115082, -0.7097139954566956, 0.07393483817577362, 0.4743744134902954, 0.3461098074913025, -0.706244945526123, -0.7149940133094788, 0.32265186309814453, -0.5473837852478027, 0.41541236639022827, -0.04358191788196564, -0.4930155873298645, 0.057033468037843704, 0.7223688364028931, 0.47390449047088623, 0.6831201910972595, -0.05232146754860878, 0.0907132551074028, -0.6246105432510376, 0.15745879709720612, -0.028592748567461967, 0.2808348834514618, -0.03093203529715538, -0.29920926690101624, 0.7490689754486084, 0.6925903558731079, -0.5285722017288208, -1.1050121784210205, -0.4552898108959198, -1.4336577653884888, -0.04641379415988922, 1.1223702430725098, 0.041290245950222015, -0.5205668807029724, 0.18410256505012512, -0.1476302295923233, 0.18854574859142303, -0.3045061230659485, 0.7276667356491089, 0.8266212344169617, -0.3943600654602051, 0.12038473784923553, -0.6658103466033936, 0.3921438455581665, 0.4812895357608795, -1.1757900714874268, -0.07097652554512024, 0.28903311491012573, 0.31968408823013306, 0.3561514616012573, 0.6387636065483093, -0.09037847071886063, 0.28515973687171936, 0.253762423992157, 0.048476431518793106, -0.002727559534832835, 0.06963399052619934, -0.2052740603685379, 0.12286033481359482, -0.25775620341300964, -0.4349434971809387 ]
open-llm-leaderboard/details_TheBloke__airoboros-13B-HF
open-llm-leaderboard
2023-10-23T02:12:50Z
201
0
[ "region:us" ]
null
2023-08-18T11:26:26Z
--- pretty_name: Evaluation run of TheBloke/airoboros-13B-HF dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/airoboros-13B-HF](https://huggingface.co/TheBloke/airoboros-13B-HF)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__airoboros-13B-HF\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-23T02:12:37.195873](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__airoboros-13B-HF/blob/main/results_2023-10-23T02-12-37.195873.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.11115771812080537,\n\ \ \"em_stderr\": 0.00321900621779522,\n \"f1\": 0.18403838087248262,\n\ \ \"f1_stderr\": 0.003410322751505753,\n \"acc\": 0.416848524958218,\n\ \ \"acc_stderr\": 0.009523880516878821\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.11115771812080537,\n \"em_stderr\": 0.00321900621779522,\n\ \ \"f1\": 0.18403838087248262,\n \"f1_stderr\": 0.003410322751505753\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0712661106899166,\n \ \ \"acc_stderr\": 0.007086462127954497\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803145\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/airoboros-13B-HF leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|arc:challenge|25_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T19:05:45.973556.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_23T02_12_37.195873 path: - '**/details_harness|drop|3_2023-10-23T02-12-37.195873.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-23T02-12-37.195873.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_23T02_12_37.195873 path: - '**/details_harness|gsm8k|5_2023-10-23T02-12-37.195873.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-23T02-12-37.195873.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hellaswag|10_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:05:45.973556.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:05:45.973556.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:05:45.973556.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:05:45.973556.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T19_05_45.973556 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:05:45.973556.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:05:45.973556.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T19_05_45.973556 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T19:05:45.973556.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T19:05:45.973556.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_23T02_12_37.195873 path: - '**/details_harness|winogrande|5_2023-10-23T02-12-37.195873.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-23T02-12-37.195873.parquet' - config_name: results data_files: - split: 2023_07_19T19_05_45.973556 path: - results_2023-07-19T19:05:45.973556.parquet - split: 2023_10_23T02_12_37.195873 path: - results_2023-10-23T02-12-37.195873.parquet - split: latest path: - results_2023-10-23T02-12-37.195873.parquet --- # Dataset Card for Evaluation run of TheBloke/airoboros-13B-HF ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/airoboros-13B-HF - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/airoboros-13B-HF](https://huggingface.co/TheBloke/airoboros-13B-HF) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__airoboros-13B-HF", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T02:12:37.195873](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__airoboros-13B-HF/blob/main/results_2023-10-23T02-12-37.195873.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.11115771812080537, "em_stderr": 0.00321900621779522, "f1": 0.18403838087248262, "f1_stderr": 0.003410322751505753, "acc": 0.416848524958218, "acc_stderr": 0.009523880516878821 }, "harness|drop|3": { "em": 0.11115771812080537, "em_stderr": 0.00321900621779522, "f1": 0.18403838087248262, "f1_stderr": 0.003410322751505753 }, "harness|gsm8k|5": { "acc": 0.0712661106899166, "acc_stderr": 0.007086462127954497 }, "harness|winogrande|5": { "acc": 0.7624309392265194, "acc_stderr": 0.011961298905803145 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
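As a complementary illustration of the loading instructions above, here is a minimal sketch of how the aggregated "results" configuration and its "latest" split (both listed in the configs section of this card) could be inspected. The repository id and the config/split names come from the card itself; the pandas conversion is simply one assumed, convenient way to browse the rows.

```python
from datasets import load_dataset

# Minimal sketch: load the aggregated "results" config of this evaluation repo.
# The repository id, the "results" config, and the "latest" split are taken from
# the configs listed above; everything else here is illustrative.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__airoboros-13B-HF",
    "results",
    split="latest",
)

# Each row holds the aggregated metrics of one run; to_pandas() gives a quick view.
print(results.to_pandas().head())
```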
[ -0.3825654983520508, -0.6512101888656616, 0.07800751179456711, 0.2750898003578186, -0.163820281624794, 0.15624544024467468, -0.3891944885253906, -0.20832520723342896, 0.46151381731033325, 0.47591984272003174, -0.7268068194389343, -0.8541305661201477, -0.6281830668449402, 0.1418774127960205, -0.17110641300678253, 1.0736349821090698, -0.2504007816314697, -0.3111345171928406, 0.08964013308286667, -0.2564161717891693, -0.37585213780403137, -0.41249558329582214, -0.46238186955451965, -0.47451159358024597, 0.34289035201072693, 0.5900963544845581, 0.3657066822052002, 0.7213143706321716, 0.6848253011703491, 0.35484838485717773, -0.17609961330890656, 0.18882125616073608, -0.45474594831466675, -0.11111216247081757, 0.22709062695503235, -0.48781105875968933, -0.6931880116462708, 0.09451709687709808, 0.6634302139282227, 0.4836575984954834, -0.37569063901901245, 0.5364845991134644, 0.07219021022319794, 0.5533013343811035, -0.5057985782623291, 0.30637574195861816, -0.4209297299385071, -0.03749378025531769, -0.34277382493019104, -0.12194839119911194, -0.02470935508608818, -0.32087644934654236, -0.20179440081119537, -0.6040046215057373, 0.10200048238039017, 0.21567799150943756, 1.2368429899215698, 0.09676547348499298, -0.13109347224235535, -0.24242989718914032, -0.2592623829841614, 0.8711941242218018, -0.963347852230072, 0.06621658056974411, 0.6655474901199341, 0.16332793235778809, -0.2189539670944214, -0.5016225576400757, -0.3289652168750763, -0.07972115278244019, -0.24823522567749023, 0.24221183359622955, -0.0909600779414177, -0.09491422772407532, 0.347932904958725, 0.6325740218162537, -0.7332723140716553, 0.07243166863918304, -0.579522967338562, -0.2742604613304138, 0.9801083207130432, 0.34204620122909546, 0.05635835602879524, -0.6085976362228394, -0.38124778866767883, -0.2995283007621765, -0.44482356309890747, 0.23700742423534393, 0.40377241373062134, 0.47066086530685425, -0.6926079392433167, 0.7783070802688599, -0.4909254014492035, 0.6103914976119995, -0.05450252816081047, -0.2990413010120392, 0.8422916531562805, -0.6218704581260681, -0.2768500745296478, 0.050481270998716354, 1.0228065252304077, 0.3807890713214874, 0.0451875738799572, 0.25501343607902527, -0.2569047808647156, -0.14755703508853912, 0.06377869099378586, -0.8308140635490417, -0.12569373846054077, 0.3820423185825348, -0.6249396800994873, -0.4288736581802368, 0.29118117690086365, -0.9105878472328186, -0.09929466992616653, -0.2306881546974182, 0.23713703453540802, -0.22294089198112488, -0.4166521728038788, -0.08143563568592072, -0.1873214691877365, 0.19261717796325684, 0.14365997910499573, -0.5816246867179871, 0.3350313603878021, 0.5691619515419006, 0.9752031564712524, -0.03367985039949417, -0.4498665928840637, -0.32888728380203247, -0.1697106957435608, -0.24126924574375153, 0.5062835812568665, -0.2384246587753296, -0.44977614283561707, -0.17015430331230164, 0.2429646998643875, -0.34872084856033325, -0.6974673867225647, 0.6212615966796875, -0.18621771037578583, 0.12433059513568878, -0.2434106171131134, -0.4736417531967163, -0.11837919801473618, 0.34722253680229187, -0.5893003344535828, 1.4712917804718018, 0.36785128712654114, -0.7634618878364563, 0.10104550421237946, -0.8447619080543518, -0.27135780453681946, 0.03813422471284866, 0.05758088082075119, -0.5752674341201782, -0.15818904340267181, 0.13459298014640808, 0.5371944904327393, -0.2763211727142334, 0.07472597062587738, -0.3442641496658325, -0.37173929810523987, 0.13259509205818176, -0.0602872334420681, 1.1392754316329956, 0.2671838104724884, -0.41825947165489197, 
0.07850632071495056, -1.001542091369629, 0.047938764095306396, 0.3003622591495514, -0.6139985918998718, -0.18605056405067444, -0.29694807529449463, 0.15939295291900635, 0.1517154425382614, 0.5160215497016907, -0.5898460745811462, 0.3230086863040924, -0.20316366851329803, 0.29732322692871094, 0.9473732709884644, 0.05448456108570099, 0.20533369481563568, -0.4629039466381073, 0.5697335004806519, 0.03854593634605408, 0.35653671622276306, 0.03640781715512276, -0.5540087819099426, -0.6816946268081665, -0.2773568630218506, 0.11127666383981705, 0.6903433799743652, -0.465755432844162, 0.8195539712905884, -0.31647804379463196, -0.7695425748825073, -0.6990253329277039, 0.23012548685073853, 0.4291115701198578, 0.5632433891296387, 0.3707980513572693, -0.16622242331504822, -0.7159534096717834, -0.9350239038467407, 0.0349678210914135, -0.3316038250923157, 0.11140729486942291, 0.5640080571174622, 0.9857726693153381, -0.351448655128479, 0.6153250932693481, -0.759368360042572, -0.34304988384246826, -0.25820910930633545, 0.12433917820453644, 0.6923533082008362, 0.5260189771652222, 0.49688342213630676, -0.6033640503883362, -0.32367077469825745, 0.018768001347780228, -0.799945592880249, -0.3824057877063751, -0.1423477828502655, -0.3241175413131714, 0.3221612870693207, -0.03245612606406212, -0.5787074565887451, 0.5731828808784485, 0.5369440913200378, -0.6120032668113708, 0.677558183670044, -0.014641376212239265, 0.3566274642944336, -1.1438384056091309, 0.23996230959892273, 0.01729436218738556, 0.11746671795845032, -0.3750198483467102, -0.12225621193647385, 0.009669078513979912, 0.26317378878593445, -0.41241124272346497, 0.7470747232437134, -0.4201957583427429, -0.1675574779510498, 0.021130865439772606, 0.1152694821357727, -0.013036509044468403, 0.5302383303642273, -0.23445749282836914, 0.6753495931625366, 0.4948159158229828, -0.32827067375183105, 0.4447309374809265, 0.5157081484794617, -0.5407174825668335, 0.2808016836643219, -0.47829556465148926, -0.11237823963165283, 0.23407144844532013, 0.15715046226978302, -0.9271268844604492, -0.420810341835022, 0.3990512490272522, -0.6160378456115723, 0.15066054463386536, -0.2280898243188858, -0.513214647769928, -0.5456452369689941, -0.483516663312912, 0.1605982780456543, 0.5063034892082214, -0.47021761536598206, 0.27999722957611084, 0.3740960657596588, -0.0059808227233588696, -0.6889488697052002, -0.7267651557922363, -0.1172931045293808, -0.28223171830177307, -0.6219883561134338, 0.37251150608062744, -0.17351602017879486, -0.2599164545536041, 0.05861344933509827, -0.12897570431232452, -0.13885793089866638, 0.2274511158466339, 0.37019628286361694, 0.5603417754173279, -0.07402116060256958, -0.3564869165420532, -0.13884174823760986, -0.0005719639011658728, 0.07550529390573502, 0.20214314758777618, 0.5548847913742065, -0.2273515909910202, -0.18252550065517426, -0.23962616920471191, 0.10558322072029114, 0.44919553399086, -0.1841811239719391, 0.8149863481521606, 0.6635375022888184, -0.3247230350971222, -0.06798083335161209, -0.4547283947467804, 0.025937261059880257, -0.4931736886501312, 0.25136247277259827, -0.26642876863479614, -0.7735562920570374, 0.8655007481575012, 0.2646380066871643, 0.09080509096384048, 0.6413214802742004, 0.5985527634620667, -0.01661188155412674, 0.6528801918029785, 0.14594952762126923, -0.14726462960243225, 0.5620390176773071, -0.7947332859039307, -0.10913517326116562, -1.093996524810791, -0.48439672589302063, -0.5357374548912048, -0.3669474720954895, -0.7278538346290588, -0.26817673444747925, 0.2394484579563141, 0.29462674260139465, 
-0.44239866733551025, 0.5640016794204712, -0.6059136986732483, 0.1500415802001953, 0.63577800989151, 0.22262398898601532, 0.001646332792006433, -0.07193967700004578, -0.13699693977832794, 0.28727349638938904, -0.5370386838912964, -0.3113306760787964, 1.3052889108657837, 0.21460263431072235, 0.6737006902694702, -0.030191773548722267, 1.0064574480056763, 0.24677129089832306, 0.34236234426498413, -0.5726567506790161, 0.620667040348053, -0.0050438037142157555, -0.7034025192260742, -0.22126175463199615, -0.6198028922080994, -0.9740239381790161, 0.19542068243026733, -0.06692775338888168, -0.8694750070571899, 0.131874218583107, 0.0030292365700006485, -0.06502192467451096, 0.29591459035873413, -0.536888062953949, 0.8388068675994873, -0.2619156837463379, -0.3806365728378296, 0.015467783436179161, -0.8309098482131958, 0.3528642952442169, 0.02685135044157505, 0.456679105758667, -0.3121182918548584, -0.0030432993080466986, 1.1963573694229126, -0.6498794555664062, 0.6980671882629395, -0.296343594789505, 0.09250014275312424, 0.4628910422325134, -0.31110891699790955, 0.4810112714767456, -0.11614684015512466, -0.3237290382385254, 0.5141384601593018, -0.18579213321208954, -0.31089529395103455, -0.27044254541397095, 0.8525004386901855, -0.8690674304962158, -0.320749431848526, -0.4117542803287506, -0.5445719957351685, 0.2851403057575226, 0.3786960244178772, 0.3678662180900574, 0.38658833503723145, 0.058312226086854935, 0.31240344047546387, 0.20981623232364655, -0.20674602687358856, 0.5907968282699585, 0.4743576943874359, -0.21843595802783966, -0.7928178906440735, 0.7154643535614014, 0.3314969837665558, 0.06180303171277046, 0.2823915481567383, 0.14369183778762817, -0.47731834650039673, -0.44841161370277405, -0.42253443598747253, 0.31812918186187744, -0.5377383828163147, -0.40244898200035095, -0.33766499161720276, -0.24076367914676666, -0.4508945047855377, -0.0900934487581253, -0.3361881375312805, -0.4692515432834625, -0.4369875192642212, -0.3034439980983734, 0.5986706614494324, 0.5524893999099731, -0.4872116148471832, 0.3573767840862274, -0.8061133027076721, 0.2776106297969818, -0.1782005876302719, 0.3901209831237793, -0.0818013846874237, -0.6117951273918152, -0.5393237471580505, 0.14267845451831818, -0.43512973189353943, -0.8816370368003845, 0.6148398518562317, 0.07678499817848206, 0.7444126605987549, 0.18828395009040833, 0.1082356795668602, 0.8144152760505676, -0.22700868546962738, 1.056119441986084, -0.0020578564144670963, -0.8062517642974854, 0.8322210311889648, -0.35264137387275696, 0.14734117686748505, 0.5236778259277344, 0.19214916229248047, -0.44393807649612427, -0.2841323912143707, -0.8954225182533264, -1.0684783458709717, 1.0591915845870972, 0.5817738175392151, -0.3467090427875519, 0.10492579638957977, 0.32445475459098816, -0.1306866556406021, 0.23685207962989807, -0.6205170154571533, -0.7045329213142395, -0.245624840259552, -0.34465885162353516, -0.007377239875495434, 0.06365817040205002, -0.3802565038204193, -0.48168352246284485, 0.909669041633606, 0.028730612248182297, 0.4983825981616974, 0.1851787120103836, -0.02318052016198635, -0.182935893535614, 0.21241916716098785, 0.4667353630065918, 0.724901556968689, -0.4872272312641144, -0.12772497534751892, 0.18441316485404968, -0.6103086471557617, 0.13044807314872742, 0.3360809087753296, -0.014584380201995373, -0.05813875421881676, 0.7008069753646851, 1.0118651390075684, 0.13425207138061523, -0.5073143839836121, 0.532784640789032, 0.19814039766788483, -0.3147534728050232, -0.38232940435409546, 0.16555102169513702, -0.04702720046043396, 
0.2632002830505371, 0.5118760466575623, -0.11466633528470993, 0.055809136480093, -0.3212013244628906, 0.294823557138443, 0.2669055163860321, -0.01726425252854824, -0.2837896943092346, 0.5542399883270264, 0.02936663292348385, -0.37002384662628174, 0.7142569422721863, -0.05992007628083229, -0.6086116433143616, 1.1752614974975586, 0.34094610810279846, 0.8471335172653198, -0.2272835224866867, 0.11408219486474991, 0.6054991483688354, 0.4283800721168518, -0.09966345876455307, 0.6000637412071228, 0.052855804562568665, -0.6722514629364014, -0.25569573044776917, -0.7817258238792419, -0.263693630695343, 0.30780160427093506, -1.1312459707260132, 0.3305872082710266, -0.1366855502128601, -0.21887560188770294, -0.10619617253541946, 0.40544450283050537, -0.7703453898429871, 0.2654477655887604, 0.08863318711519241, 0.9775944948196411, -1.0140382051467896, 0.6598476767539978, 0.8715322017669678, -0.450214684009552, -0.962647557258606, -0.30090299248695374, 0.047666050493717194, -0.9108777642250061, 0.5451130867004395, 0.2737616002559662, 0.4125584065914154, -0.1761314868927002, -0.6389947533607483, -1.0148029327392578, 1.5129283666610718, 0.12531735002994537, -0.5569799542427063, 0.1221335157752037, 0.12030839920043945, 0.3222877085208893, -0.2581712007522583, 0.5230159759521484, 0.6736071705818176, 0.7846423983573914, -0.08447906374931335, -1.0051677227020264, 0.2924658954143524, -0.4860512912273407, -0.0899645984172821, 0.302026629447937, -0.9822714328765869, 0.9284088015556335, -0.15884177386760712, 0.043499335646629333, -0.058962032198905945, 0.3324850797653198, 0.6142609715461731, 0.3721586763858795, 0.4256948232650757, 0.7943299412727356, 0.6836268305778503, -0.34540683031082153, 0.9879170656204224, -0.31395193934440613, 0.8807809352874756, 1.004944920539856, -0.02960098162293434, 0.7996437549591064, 0.3272799849510193, -0.4498218297958374, 0.5938569903373718, 0.8371886014938354, -0.3955437242984772, 0.4992518424987793, 0.12314040958881378, 0.005537133663892746, -0.03392559662461281, 0.051128827035427094, -0.4366244971752167, 0.453276664018631, 0.23995359241962433, -0.43402618169784546, -0.18674582242965698, -0.18607425689697266, 0.10415221750736237, -0.3438260853290558, -0.2748764455318451, 0.6448639631271362, -0.005424708593636751, -0.5332142114639282, 0.8334897756576538, -0.11352379620075226, 0.7194115519523621, -0.6826320886611938, -0.13982930779457092, -0.3019019663333893, 0.1732255071401596, -0.5548463463783264, -0.9653534293174744, 0.26157206296920776, 0.09085885435342789, -0.16299742460250854, -0.16344501078128815, 0.6461044549942017, -0.3042706251144409, -0.5904501676559448, 0.5088620781898499, 0.37439045310020447, 0.27226927876472473, 0.18667952716350555, -0.9683509469032288, 0.3288796842098236, 0.2580690383911133, -0.8226332068443298, 0.2995256185531616, 0.28015077114105225, 0.19125504791736603, 0.5998131632804871, 0.6689788103103638, 0.16996119916439056, 0.18267399072647095, -0.15057848393917084, 0.9983500838279724, -0.8019941449165344, -0.37149709463119507, -0.7871753573417664, 0.82158362865448, -0.25010573863983154, -0.6553058624267578, 0.844419002532959, 0.9950475692749023, 0.8309257626533508, 0.12416909635066986, 0.8455334305763245, -0.49863940477371216, 0.5313556790351868, -0.422249972820282, 0.8397694230079651, -0.7811166644096375, 0.3021319508552551, -0.19597823917865753, -0.7722312808036804, 0.0080019012093544, 0.7482248544692993, -0.09781437367200851, -0.009275528602302074, 0.5551179051399231, 0.9972060918807983, 0.09162826091051102, 0.08792448788881302, -0.05360729619860649, 
0.4817279279232025, 0.3116820454597473, 0.6046361923217773, 0.6412087678909302, -0.6917241215705872, 0.4846518635749817, -0.7241317629814148, -0.43491673469543457, -0.2752496898174286, -0.7075239419937134, -0.7612945437431335, -0.48788535594940186, -0.36098015308380127, -0.5256406664848328, -0.0380370169878006, 1.0789296627044678, 0.5649187564849854, -0.8799938559532166, -0.5734959840774536, -0.010230000130832195, 0.15978600084781647, -0.2745686173439026, -0.37837523221969604, 0.5225816369056702, -0.03779403492808342, -0.676667332649231, 0.37796998023986816, -0.11192356795072556, -0.10409615188837051, 0.04166705533862114, -0.23146072030067444, -0.42074158787727356, -0.15757615864276886, 0.43996676802635193, 0.16352538764476776, -0.6998338103294373, -0.27425625920295715, -0.04430267587304115, 0.01170042622834444, 0.2752284109592438, 0.2639549672603607, -0.5351514220237732, 0.04186815023422241, 0.5618820786476135, 0.28484493494033813, 0.6826919913291931, 0.046172283589839935, 0.1295369565486908, -0.7559754252433777, -0.008042597211897373, -0.030627692118287086, 0.5314004421234131, 0.19461022317409515, -0.45438677072525024, 0.9964797496795654, 0.4058314859867096, -0.7088544964790344, -1.0134503841400146, -0.31010687351226807, -1.2852423191070557, -0.11318390816450119, 1.435921311378479, -0.2717653214931488, -0.28453168272972107, 0.11754344403743744, -0.20070041716098785, 0.35188671946525574, -0.7691234350204468, 0.5199496746063232, 0.7885894179344177, -0.4054951071739197, 0.0013738240813836455, -0.5087882876396179, 0.26785311102867126, 0.03281018137931824, -1.0263121128082275, 0.01793031208217144, 0.3798503279685974, 0.4120003581047058, 0.23922796547412872, 0.6518241763114929, 0.037435293197631836, -0.11657987534999847, -0.009788008406758308, 0.2022453248500824, -0.2685602009296417, -0.15575644373893738, -0.24655897915363312, 0.11174193024635315, -0.41283944249153137, -0.42669427394866943 ]
open-llm-leaderboard/details_TheBloke__Wizard-Vicuna-30B-Superhot-8K-fp16
open-llm-leaderboard
2023-08-27T12:33:55Z
201
0
[ "region:us" ]
null
2023-08-18T11:27:36Z
--- pretty_name: Evaluation run of TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16](https://huggingface.co/TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__Wizard-Vicuna-30B-Superhot-8K-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-31T18:46:06.024423](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Wizard-Vicuna-30B-Superhot-8K-fp16/blob/main/results_2023-07-31T18%3A46%3A06.024423.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23519468841762173,\n\ \ \"acc_stderr\": 0.030867946729594396,\n \"acc_norm\": 0.23665032922383497,\n\ \ \"acc_norm_stderr\": 0.03088234450623421,\n \"mc1\": 0.22766217870257038,\n\ \ \"mc1_stderr\": 0.01467925503211107,\n \"mc2\": 0.4747511496520905,\n\ \ \"mc2_stderr\": 0.016743067237896876\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.22525597269624573,\n \"acc_stderr\": 0.012207839995407312,\n\ \ \"acc_norm\": 0.2619453924914676,\n \"acc_norm_stderr\": 0.012849054826858115\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2804222266480781,\n\ \ \"acc_stderr\": 0.004482874732237348,\n \"acc_norm\": 0.3296156144194384,\n\ \ \"acc_norm_stderr\": 0.004691128722535483\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.1925925925925926,\n\ \ \"acc_stderr\": 0.03406542058502653,\n \"acc_norm\": 0.1925925925925926,\n\ \ \"acc_norm_stderr\": 0.03406542058502653\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\ \ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n\ \ \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.19245283018867926,\n \"acc_stderr\": 0.024262979839372277,\n\ \ \"acc_norm\": 0.19245283018867926,\n \"acc_norm_stderr\": 0.024262979839372277\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\ \ \"acc_stderr\": 0.03653946969442099,\n 
\"acc_norm\": 0.2569444444444444,\n\ \ \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n\ \ \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \ \ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135303,\n\ \ \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135303\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.24867724867724866,\n \"acc_stderr\": 0.022261817692400168,\n \"\ acc_norm\": 0.24867724867724866,\n \"acc_norm_stderr\": 0.022261817692400168\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.18064516129032257,\n \"acc_stderr\": 0.02188617856717255,\n \"\ acc_norm\": 0.18064516129032257,\n \"acc_norm_stderr\": 0.02188617856717255\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"\ acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.03427743175816524,\n\ \ \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.03427743175816524\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"\ acc_norm\": 0.17676767676767677,\n 
\"acc_norm_stderr\": 0.027178752639044915\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.18652849740932642,\n \"acc_stderr\": 0.028112091210117447,\n\ \ \"acc_norm\": 0.18652849740932642,\n \"acc_norm_stderr\": 0.028112091210117447\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.2128205128205128,\n \"acc_stderr\": 0.020752423722127995,\n\ \ \"acc_norm\": 0.2128205128205128,\n \"acc_norm_stderr\": 0.020752423722127995\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.24814814814814815,\n \"acc_stderr\": 0.0263357394040558,\n \ \ \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.0263357394040558\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.23949579831932774,\n \"acc_stderr\": 0.027722065493361255,\n\ \ \"acc_norm\": 0.23949579831932774,\n \"acc_norm_stderr\": 0.027722065493361255\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"\ acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"\ acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.2361111111111111,\n \"acc_stderr\": 0.02896370257079103,\n \"\ acc_norm\": 0.2361111111111111,\n \"acc_norm_stderr\": 0.02896370257079103\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n\ \ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.2742616033755274,\n \"acc_stderr\": 0.029041333510598035,\n\ \ \"acc_norm\": 0.2742616033755274,\n \"acc_norm_stderr\": 0.029041333510598035\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n\ \ \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n\ \ \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\ acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\ \ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n\ \ \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\ \ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\ \ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 
0.27350427350427353,\n\ \ \"acc_stderr\": 0.029202540153431163,\n \"acc_norm\": 0.27350427350427353,\n\ \ \"acc_norm_stderr\": 0.029202540153431163\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n\ \ \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n\ \ \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.023083658586984204,\n\ \ \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.023083658586984204\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24581005586592178,\n\ \ \"acc_stderr\": 0.01440029642922562,\n \"acc_norm\": 0.24581005586592178,\n\ \ \"acc_norm_stderr\": 0.01440029642922562\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1832797427652733,\n\ \ \"acc_stderr\": 0.021974198848265805,\n \"acc_norm\": 0.1832797427652733,\n\ \ \"acc_norm_stderr\": 0.021974198848265805\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n\ \ \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \ \ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\ \ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\ \ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n\ \ \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\ : {\n \"acc\": 0.20909090909090908,\n \"acc_stderr\": 0.03895091015724136,\n\ \ \"acc_norm\": 0.20909090909090908,\n \"acc_norm_stderr\": 0.03895091015724136\n\ \ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.22040816326530613,\n\ \ \"acc_stderr\": 0.02653704531214529,\n \"acc_norm\": 0.22040816326530613,\n\ \ \"acc_norm_stderr\": 0.02653704531214529\n },\n \"harness|hendrycksTest-sociology|5\"\ : {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\ \ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\ \ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n\ \ \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.28654970760233917,\n\ \ 
\"acc_stderr\": 0.034678266857038266,\n \"acc_norm\": 0.28654970760233917,\n\ \ \"acc_norm_stderr\": 0.034678266857038266\n },\n \"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.22766217870257038,\n \"mc1_stderr\": 0.01467925503211107,\n\ \ \"mc2\": 0.4747511496520905,\n \"mc2_stderr\": 0.016743067237896876\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|arc:challenge|25_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hellaswag|10_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T18:46:06.024423.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T18:46:06.024423.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T18:46:06.024423.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T18:46:06.024423.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T18:46:06.024423.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T18:46:06.024423.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T18_46_06.024423 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T18:46:06.024423.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T18:46:06.024423.parquet' - config_name: results data_files: - split: 2023_07_31T18_46_06.024423 path: - results_2023-07-31T18:46:06.024423.parquet - split: latest path: - results_2023-07-31T18:46:06.024423.parquet --- # Dataset Card for Evaluation run of TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16](https://huggingface.co/TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__Wizard-Vicuna-30B-Superhot-8K-fp16", "harness_truthfulqa_mc_0", split="train") ``` A fuller usage sketch follows at the end of this card. ## Latest results These are the [latest results from run 2023-07-31T18:46:06.024423](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Wizard-Vicuna-30B-Superhot-8K-fp16/blob/main/results_2023-07-31T18%3A46%3A06.024423.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23519468841762173, "acc_stderr": 0.030867946729594396, "acc_norm": 0.23665032922383497, "acc_norm_stderr": 0.03088234450623421, "mc1": 0.22766217870257038, "mc1_stderr": 0.01467925503211107, "mc2": 0.4747511496520905, "mc2_stderr": 0.016743067237896876 }, "harness|arc:challenge|25": { "acc": 0.22525597269624573, "acc_stderr": 0.012207839995407312, "acc_norm": 0.2619453924914676, "acc_norm_stderr": 0.012849054826858115 }, "harness|hellaswag|10": { "acc": 0.2804222266480781, "acc_stderr": 0.004482874732237348, "acc_norm": 0.3296156144194384, "acc_norm_stderr": 0.004691128722535483 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.1925925925925926, "acc_stderr": 0.03406542058502653, "acc_norm": 0.1925925925925926, "acc_norm_stderr": 0.03406542058502653 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.19245283018867926, "acc_stderr": 0.024262979839372277, "acc_norm": 0.19245283018867926, "acc_norm_stderr": 0.024262979839372277 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, 
"harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135303, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135303 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24867724867724866, "acc_stderr": 0.022261817692400168, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.022261817692400168 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.18064516129032257, "acc_stderr": 0.02188617856717255, "acc_norm": 0.18064516129032257, "acc_norm_stderr": 0.02188617856717255 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2606060606060606, "acc_stderr": 0.03427743175816524, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.03427743175816524 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.18652849740932642, "acc_stderr": 0.028112091210117447, "acc_norm": 0.18652849740932642, "acc_norm_stderr": 0.028112091210117447 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2128205128205128, "acc_stderr": 0.020752423722127995, "acc_norm": 0.2128205128205128, "acc_norm_stderr": 0.020752423722127995 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23949579831932774, "acc_stderr": 0.027722065493361255, "acc_norm": 0.23949579831932774, "acc_norm_stderr": 0.027722065493361255 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2361111111111111, "acc_stderr": 0.02896370257079103, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.02896370257079103 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598035, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598035 }, 
"harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.27350427350427353, "acc_stderr": 0.029202540153431163, "acc_norm": 0.27350427350427353, "acc_norm_stderr": 0.029202540153431163 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24277456647398843, "acc_stderr": 0.023083658586984204, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.023083658586984204 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24581005586592178, "acc_stderr": 0.01440029642922562, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.01440029642922562 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1832797427652733, "acc_stderr": 0.021974198848265805, "acc_norm": 0.1832797427652733, "acc_norm_stderr": 0.021974198848265805 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 0.03895091015724136, "acc_norm": 0.20909090909090908, 
"acc_norm_stderr": 0.03895091015724136 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.22040816326530613, "acc_stderr": 0.02653704531214529, "acc_norm": 0.22040816326530613, "acc_norm_stderr": 0.02653704531214529 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.28654970760233917, "acc_stderr": 0.034678266857038266, "acc_norm": 0.28654970760233917, "acc_norm_stderr": 0.034678266857038266 }, "harness|truthfulqa:mc|0": { "mc1": 0.22766217870257038, "mc1_stderr": 0.01467925503211107, "mc2": 0.4747511496520905, "mc2_stderr": 0.016743067237896876 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7130002379417419, -0.8463862538337708, 0.2561344504356384, 0.1900828629732132, -0.16668811440467834, -0.07037767022848129, 0.03196470066905022, -0.22767376899719238, 0.5744678974151611, -0.052206721156835556, -0.5217768549919128, -0.6814706921577454, -0.4303436279296875, 0.24335460364818573, -0.04056713730096817, 0.8304420113563538, -0.18372966349124908, -0.11262647807598114, 0.11269627511501312, -0.02802126295864582, -0.29795727133750916, -0.3266960382461548, -0.48603999614715576, -0.36142024397850037, 0.2019084244966507, 0.44788941740989685, 0.44851696491241455, 0.7577904462814331, 0.6969088912010193, 0.3010476529598236, -0.29924511909484863, 0.02305801771581173, -0.17794984579086304, -0.27120712399482727, 0.37394705414772034, -0.39031898975372314, -0.8125250339508057, 0.2703959345817566, 0.7549449801445007, 0.5764630436897278, -0.05945512652397156, 0.2873189151287079, 0.005989829543977976, 0.5825467109680176, -0.3821108341217041, 0.029734933748841286, -0.3046323359012604, 0.24414245784282684, -0.1906459480524063, -0.3104778230190277, -0.29355424642562866, -0.21191701292991638, -0.1554577499628067, -0.8956405520439148, 0.31600216031074524, 0.32894167304039, 1.5901247262954712, -0.15168920159339905, -0.20840668678283691, 0.12948036193847656, -0.14833855628967285, 0.9901614785194397, -0.8649236559867859, 0.37164705991744995, 0.763939619064331, 0.15100018680095673, -0.19354026019573212, -0.5764434933662415, -0.6683826446533203, 0.06032264977693558, -0.339462012052536, 0.36827191710472107, -0.04295821487903595, -0.16648219525814056, 0.3672632873058319, 0.6905357837677002, -0.6781966686248779, 0.17490941286087036, -0.6576434969902039, -0.18218356370925903, 1.0744991302490234, 0.3733624219894409, 0.08000409603118896, -0.36679986119270325, -0.6764423847198486, -0.6513152122497559, -0.4202050566673279, 0.23790965974330902, 0.4088134765625, 0.3348427414894104, -0.3702971339225769, 0.7266291975975037, -0.3963237702846527, 0.5726723074913025, 0.43407416343688965, 0.009273232892155647, 0.8493132591247559, -0.7085224390029907, -0.5444805026054382, -0.09103017300367355, 1.1176953315734863, 0.6152122616767883, 0.07268363237380981, 0.2126258760690689, 0.05162161588668823, -0.11743196099996567, 0.03480998054146767, -0.870088517665863, -0.3080619275569916, 0.2028016746044159, -0.3541678190231323, -0.4564846158027649, 0.3252613842487335, -0.8651185035705566, 0.10453929007053375, -0.028677821159362793, 0.4142078757286072, -0.5341575145721436, -0.13617688417434692, 0.22853955626487732, -0.46086007356643677, 0.8182758092880249, -0.1406717747449875, -0.8279635310173035, 0.3667249381542206, 0.5138305425643921, 0.7767651081085205, -0.07311194390058517, -0.4770182967185974, -0.08701165020465851, -0.09225305169820786, -0.33096468448638916, 0.5533573627471924, -0.24814660847187042, -0.41634848713874817, -0.2962370216846466, 0.31072351336479187, -0.2555316388607025, -0.3741181194782257, 0.6698632836341858, -0.24502362310886383, 0.19593548774719238, -0.42169708013534546, -0.6727387309074402, 0.15298661589622498, 0.4032873213291168, -0.43235018849372864, 1.296867847442627, 0.26648640632629395, -0.8083367943763733, 0.4662559926509857, -0.57759690284729, -0.11530753970146179, 0.008558777160942554, -0.05737872049212456, -0.7882676720619202, -0.24221505224704742, 0.1648804098367691, 0.41341713070869446, -0.14221514761447906, -0.1345260888338089, -0.3737010657787323, -0.37058088183403015, 0.32997772097587585, -0.1848633885383606, 1.2291513681411743, -0.04553697258234024, -0.7772202491760254, -0.10199584811925888, 
-1.2459362745285034, 0.2813386917114258, 0.21761079132556915, -0.3414563536643982, -0.17491406202316284, -0.5206174254417419, -0.02041134424507618, 0.16299548745155334, 0.27729374170303345, -0.8004663586616516, 0.3027612566947937, -0.35726815462112427, 0.11113511025905609, 1.255059003829956, 0.051958706229925156, 0.16332873702049255, -0.5212782621383667, 0.5365862250328064, 0.18860408663749695, 0.2249971181154251, 0.39269742369651794, -0.6010404229164124, -0.8016232252120972, -0.5045928359031677, -0.042854711413383484, 0.6018627882003784, -0.23194125294685364, 1.1376352310180664, 0.08302470296621323, -0.917603075504303, -0.4807758033275604, -0.11497368663549423, 0.5162537097930908, 0.7676569819450378, 0.6286261677742004, -0.042009297758340836, -0.6243928074836731, -1.1151424646377563, -0.30324000120162964, -0.16613075137138367, 0.11950808763504028, 0.19309104979038239, 1.0217686891555786, -0.23951296508312225, 0.603606641292572, -1.0181334018707275, -0.20976586639881134, 0.15336720645427704, -0.07834719121456146, 0.7706157565116882, 0.730918824672699, 0.5784664750099182, -0.6684013605117798, -0.5429126620292664, 0.212631955742836, -0.8877242207527161, -0.07370270043611526, 0.09676983207464218, -0.3226955831050873, 0.16192100942134857, 0.1360374093055725, -0.7158129215240479, 0.556147575378418, 0.20493002235889435, -1.0808405876159668, 1.106934666633606, -0.348071426153183, 0.5993298292160034, -0.9884663820266724, 0.1625307947397232, -0.06117028370499611, 0.04069485515356064, -0.5103933215141296, 0.054882485419511795, 0.11489421129226685, 0.4674586057662964, -0.5353591442108154, 0.8316168785095215, -0.7036926746368408, -0.04960574582219124, 0.4208279550075531, 0.10196320712566376, -0.08344695717096329, 0.35625970363616943, -0.20661404728889465, 0.788693368434906, 0.757088303565979, -0.46449172496795654, 0.5633224844932556, 0.4015767574310303, -0.22500289976596832, 0.7283639907836914, -0.46587705612182617, -0.31165704131126404, 0.32111576199531555, -0.035942379385232925, -0.8297317028045654, -0.4710504710674286, 0.04050510749220848, -0.614852786064148, -0.08191970735788345, 0.3775486648082733, -0.30434077978134155, -0.799953281879425, -0.9497629404067993, 0.3103370666503906, 0.6743605136871338, -0.41952237486839294, -0.17581585049629211, 0.03762359172105789, 0.13393090665340424, -0.8260483741760254, -0.8429703712463379, -0.47221389412879944, -0.2123367339372635, -0.6783614754676819, 0.3297587037086487, -0.2780296802520752, -0.2712480425834656, -0.10342688113451004, -0.24049630761146545, -0.3000057637691498, -0.01649315282702446, 0.1258944422006607, 0.7094789743423462, -0.37363991141319275, -0.31593433022499084, -0.26370546221733093, -0.18553005158901215, 0.1883053034543991, -0.0808059349656105, 0.3839780390262604, -0.44830384850502014, -0.3727529048919678, -0.4557141363620758, -0.028713509440422058, 0.7095775604248047, -0.03697965666651726, 0.7845392823219299, 0.4538290798664093, -0.30144616961479187, -0.007073040120303631, -0.31150102615356445, -0.2755325138568878, -0.5852405428886414, 0.2813131511211395, -0.523666501045227, -1.020277738571167, 0.8111626505851746, 0.5790021419525146, 0.08628560602664948, 1.129672884941101, 0.6350032091140747, -0.30418556928634644, 0.9952957630157471, 0.03505299985408783, 0.3600040674209595, 0.4326040744781494, -0.7367802858352661, 0.11540167778730392, -0.9261899590492249, -0.3419182002544403, -0.5621529221534729, -0.47343093156814575, -0.7000163793563843, -0.09581241011619568, 0.28363722562789917, 0.1772882044315338, -0.6909106373786926, 0.6198158264160156, 
-0.841661810874939, 0.5747556090354919, 0.6037354469299316, 0.27565106749534607, 0.16595624387264252, -0.1609564870595932, -0.38193175196647644, -0.12615938484668732, -0.46286848187446594, -0.21681298315525055, 1.2366567850112915, 0.2304581105709076, 0.7021136283874512, 0.05764588341116905, 0.8841329216957092, 0.0884467139840126, -0.07502778619527817, -0.5771152973175049, 0.6696151494979858, 0.13288713991641998, -0.8641019463539124, -0.4520204961299896, -0.4914921820163727, -1.108394742012024, 0.4145181477069855, -0.14970673620700836, -0.8602944016456604, 0.1326342225074768, 0.040430065244436264, -0.20301033556461334, 0.4927385449409485, -0.564417839050293, 0.875623881816864, -0.12534567713737488, -0.5122731328010559, 0.09640508890151978, -0.829889178276062, 0.46119093894958496, 0.22422438859939575, 0.23278211057186127, 0.04895300790667534, 0.24597913026809692, 1.175173044204712, -0.8548458814620972, 0.4000602960586548, 0.07176729291677475, 0.055981528013944626, 0.33957168459892273, -0.17532978951931, 0.4858599603176117, 0.04947126284241676, -0.00044951250310987234, -0.10117131471633911, 0.3131505250930786, -0.8672181963920593, -0.11592128872871399, 0.9138187170028687, -0.9568875432014465, -0.59605872631073, -0.8660553693771362, -0.5313080549240112, 0.07928746938705444, 0.5648377537727356, 0.4192563593387604, 0.5479426980018616, 0.02701057679951191, 0.4498252868652344, 0.8261674046516418, -0.12177626043558121, 0.5967238545417786, 0.2634248733520508, 0.07086431980133057, -0.6511151194572449, 0.8589162826538086, 0.1342947632074356, 0.36225375533103943, 0.28781676292419434, 0.3969079554080963, -0.5345211625099182, -0.23330852389335632, -0.22824962437152863, 0.4901678264141083, -0.6255314350128174, -0.27508458495140076, -0.34711888432502747, -0.3714751899242401, -0.7839133143424988, -0.6391326785087585, -0.3423563539981842, -0.50130695104599, -0.4718110263347626, -0.5136640071868896, 0.6111035943031311, 0.45316407084465027, -0.38119783997535706, 0.044427771121263504, -0.5208542943000793, 0.2818233072757721, 0.32877567410469055, 0.5207409858703613, -0.37370336055755615, -0.5868692398071289, 0.004621055442839861, -0.1795448362827301, -0.5894589424133301, -0.9886581301689148, 0.369536429643631, -0.057641442865133286, 0.5290852189064026, 0.5854049324989319, 0.05892230570316315, 0.8861889839172363, -0.22937676310539246, 1.0825772285461426, 0.3729199469089508, -0.7426663637161255, 0.7365634441375732, -0.3321242332458496, 0.18510845303535461, 0.6800628304481506, 0.1911291480064392, -0.18139877915382385, -0.714687705039978, -1.336580753326416, -0.8211469054222107, 0.6481156945228577, 0.3853275179862976, -0.27460968494415283, 0.043542228639125824, 0.13831646740436554, -0.2789609730243683, -0.18520045280456543, -0.6746925711631775, -0.9009582996368408, -0.15999847650527954, -0.4854545295238495, 0.1017020046710968, 0.07568208873271942, -0.3833979070186615, -0.8416441082954407, 0.9288250207901001, 0.0188511461019516, 0.5883219838142395, 0.48203274607658386, 0.05804593488574028, 0.08001638203859329, 0.46506789326667786, 0.8913332223892212, 0.7408530116081238, -0.4855165481567383, 0.39862769842147827, 0.39233890175819397, -1.0542454719543457, 0.46144989132881165, 0.2958785891532898, -0.08499559760093689, -0.026242299005389214, 0.49630728363990784, 0.45868468284606934, 0.058320678770542145, -0.2206355631351471, 0.612658679485321, -0.012873203493654728, -0.5657877326011658, -0.3756676912307739, 0.13586382567882538, -0.1296757012605667, 0.029612820595502853, 0.40895387530326843, -0.17975369095802307, 
-0.01706014946103096, -0.5203363299369812, 0.4569412171840668, 0.39074796438217163, -0.46555691957473755, -0.14812956750392914, 0.7164848446846008, -0.20588776469230652, -0.19617688655853271, 0.33276495337486267, -0.1701747179031372, -0.6018823981285095, 1.141544222831726, 0.5728656649589539, 0.7086728811264038, -0.24405400454998016, -0.06484934687614441, 0.9082794785499573, 0.3848021626472473, -0.03721613064408302, 0.4927634000778198, 0.30474594235420227, -0.2642001807689667, 0.19061824679374695, -0.8705454468727112, -0.06760990619659424, 0.1702369749546051, -0.8183147311210632, 0.33087819814682007, -0.5154514312744141, -0.19385266304016113, 0.01677660271525383, 0.44265270233154297, -0.48321712017059326, 0.5522538423538208, -0.4193832576274872, 1.2390130758285522, -0.9905498027801514, 0.7082827091217041, 0.7289104461669922, -0.5839327573776245, -1.043697714805603, -0.544869065284729, 0.02085869014263153, -0.7933672666549683, 0.5412194728851318, -0.041264478117227554, 0.18035152554512024, -0.0908699706196785, -0.7158597111701965, -0.8669264912605286, 1.4171342849731445, -0.04058288037776947, -0.4416196346282959, 0.22494782507419586, -0.09558187425136566, 0.4760480225086212, 0.1506137251853943, 0.6375246644020081, 0.7786157727241516, 0.812725305557251, -0.0819058045744896, -0.7701563239097595, 0.30734050273895264, -0.5123633742332458, -0.31253373622894287, 0.4317470192909241, -0.9378212094306946, 1.1571911573410034, 0.015445796772837639, 0.21867357194423676, -0.1711549460887909, 0.6605071425437927, 0.8377434611320496, 0.281268447637558, 0.3387279510498047, 0.895229697227478, 0.8307313919067383, -0.45486557483673096, 1.0607500076293945, -0.20309700071811676, 0.8606370091438293, 0.7087851762771606, 0.24599818885326385, 0.7851396799087524, 0.6711359620094299, -0.5965755581855774, 0.5713692307472229, 0.825745701789856, -0.31518062949180603, 0.4100644290447235, 0.24800986051559448, -0.12200239300727844, -0.13164405524730682, 0.45394933223724365, -0.8710169792175293, 0.09799392521381378, 0.0731721818447113, -0.3263372480869293, 0.09791915118694305, -0.4325430393218994, 0.29808738827705383, -0.11185497790575027, -0.07728195935487747, 0.3824632465839386, 0.03363621234893799, -0.4694695770740509, 0.9681230187416077, -0.1860782504081726, 0.7697473168373108, -0.5468028783798218, -0.06847522407770157, -0.3975788354873657, 0.5949088335037231, -0.4583989679813385, -1.0533090829849243, 0.16851946711540222, 0.05921920761466026, -0.13628362119197845, -0.14386184513568878, 0.6908006072044373, -0.19384914636611938, -0.7728117108345032, 0.1492147296667099, 0.04962502047419548, 0.1260528266429901, 0.507091224193573, -0.688609778881073, -0.3418594300746918, -0.07148422300815582, -0.600264847278595, 0.15566448867321014, 0.29879918694496155, 0.28433698415756226, 0.5324094295501709, 0.6223616600036621, 0.17131923139095306, 0.46060508489608765, -0.523445725440979, 0.7866098284721375, -1.053658127784729, -0.7071926593780518, -0.9272679090499878, 0.47858136892318726, -0.34335869550704956, -0.8862890005111694, 1.0330506563186646, 1.0488550662994385, 0.8565028309822083, -0.016618702560663223, 0.6262670755386353, -0.3669770359992981, 0.2520737051963806, -0.39338064193725586, 0.905320405960083, -0.8769485950469971, -0.23320512473583221, -0.27440503239631653, -0.7216720581054688, -0.39519408345222473, 0.8253534436225891, -0.17768831551074982, 0.023484166711568832, 1.0805256366729736, 0.7133790850639343, -0.09226523339748383, 0.02893310971558094, -0.024163711816072464, 0.5691460967063904, 0.37587329745292664, 
0.9709432125091553, 0.6186363697052002, -0.8028984665870667, 0.3508550226688385, -0.493242084980011, -0.4406052231788635, -0.41769781708717346, -0.4521680772304535, -0.850316047668457, -0.47609081864356995, -0.22213011980056763, -0.6437618136405945, -0.1270853877067566, 0.9834230542182922, 0.4685484766960144, -0.9390105605125427, -0.39950376749038696, -0.15345598757266998, 0.14524713158607483, -0.6022387146949768, -0.4224988520145416, 0.7329373359680176, -0.11849572509527206, -0.5803882479667664, 0.19774550199508667, -0.10511419177055359, 0.1887049525976181, 0.06874071061611176, -0.411544531583786, -0.7260767817497253, 0.03165166452527046, 0.4263761341571808, 0.36925607919692993, -0.6876277327537537, -0.6941577792167664, 0.3121040463447571, -0.5010957717895508, 0.4260252118110657, -0.021732855588197708, -0.5155288577079773, 0.09566010534763336, 0.7129045128822327, 0.46092653274536133, 0.7141688466072083, -0.07098481804132462, 0.08568213880062103, -0.6419453024864197, 0.16360187530517578, -0.020829282701015472, 0.29235318303108215, -0.023952824994921684, -0.3192354440689087, 0.7705848813056946, 0.6865704655647278, -0.5408203601837158, -1.0908939838409424, -0.43228837847709656, -1.4263790845870972, -0.046569328755140305, 1.1297715902328491, 0.01250456366688013, -0.5018185377120972, 0.2077498435974121, -0.1319054216146469, 0.2171562761068344, -0.2971726357936859, 0.7382597327232361, 0.787308394908905, -0.40059661865234375, 0.06383919715881348, -0.650520920753479, 0.36643484234809875, 0.5241553783416748, -1.1566284894943237, -0.0832374095916748, 0.26665058732032776, 0.3272208273410797, 0.3772325813770294, 0.6803656220436096, -0.11558537185192108, 0.29854124784469604, 0.2625108063220978, 0.05994633585214615, -0.01313822716474533, 0.01786269061267376, -0.22625690698623657, 0.09495854377746582, -0.2593735456466675, -0.45418086647987366 ]
open-llm-leaderboard/details_TheBloke__guanaco-13B-HF
open-llm-leaderboard
2023-10-23T02:23:46Z
201
0
[ "region:us" ]
null
2023-08-18T11:28:11Z
--- pretty_name: Evaluation run of TheBloke/guanaco-13B-HF dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/guanaco-13B-HF](https://huggingface.co/TheBloke/guanaco-13B-HF) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configurations, each one corresponding to one of the\ \ evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run. The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" stores all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__guanaco-13B-HF\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-23T02:23:34.396726](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__guanaco-13B-HF/blob/main/results_2023-10-23T02-23-34.396726.json) (note\ \ that there might be results for other tasks in the repo if successive evals didn't\ \ cover the same tasks. You can find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.003984899328859061,\n\ \ \"em_stderr\": 0.0006451805848102414,\n \"f1\": 0.06359479865771825,\n\ \ \"f1_stderr\": 0.001462243147092022,\n \"acc\": 0.422835936195714,\n\ \ \"acc_stderr\": 0.009899837599397724\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.003984899328859061,\n \"em_stderr\": 0.0006451805848102414,\n\ \ \"f1\": 0.06359479865771825,\n \"f1_stderr\": 0.001462243147092022\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08718726307808947,\n \ \ \"acc_stderr\": 0.007770691416783571\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7584846093133386,\n \"acc_stderr\": 0.012028983782011875\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/guanaco-13B-HF leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|arc:challenge|25_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T19:24:37.744515.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_23T02_23_34.396726 path: - '**/details_harness|drop|3_2023-10-23T02-23-34.396726.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-23T02-23-34.396726.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_23T02_23_34.396726 path: - '**/details_harness|gsm8k|5_2023-10-23T02-23-34.396726.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-23T02-23-34.396726.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hellaswag|10_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:24:37.744515.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:24:37.744515.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:24:37.744515.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:24:37.744515.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T19_24_37.744515 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:24:37.744515.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:24:37.744515.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T19_24_37.744515 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T19:24:37.744515.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T19:24:37.744515.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_23T02_23_34.396726 path: - '**/details_harness|winogrande|5_2023-10-23T02-23-34.396726.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-23T02-23-34.396726.parquet' - config_name: results data_files: - split: 2023_07_19T19_24_37.744515 path: - results_2023-07-19T19:24:37.744515.parquet - split: 2023_10_23T02_23_34.396726 path: - results_2023-10-23T02-23-34.396726.parquet - split: latest path: - results_2023-10-23T02-23-34.396726.parquet --- # Dataset Card for Evaluation run of TheBloke/guanaco-13B-HF ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/guanaco-13B-HF - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/guanaco-13B-HF](https://huggingface.co/TheBloke/guanaco-13B-HF) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__guanaco-13B-HF", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T02:23:34.396726](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__guanaco-13B-HF/blob/main/results_2023-10-23T02-23-34.396726.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.003984899328859061, "em_stderr": 0.0006451805848102414, "f1": 0.06359479865771825, "f1_stderr": 0.001462243147092022, "acc": 0.422835936195714, "acc_stderr": 0.009899837599397724 }, "harness|drop|3": { "em": 0.003984899328859061, "em_stderr": 0.0006451805848102414, "f1": 0.06359479865771825, "f1_stderr": 0.001462243147092022 }, "harness|gsm8k|5": { "acc": 0.08718726307808947, "acc_stderr": 0.007770691416783571 }, "harness|winogrande|5": { "acc": 0.7584846093133386, "acc_stderr": 0.012028983782011875 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
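The summary above shows how to load a single per-task config. As a complementary, hedged sketch (it assumes the `datasets` library and uses only the config and split names already declared in this card's metadata, such as `results`, `harness_wino­grande_5` and the `latest` split; the exact columns inside each parquet file may vary between harness versions), you could also enumerate the available configs and pull the aggregated results:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_TheBloke__guanaco-13B-HF"

# Enumerate every config declared in the card metadata
# (one per evaluated task, plus the aggregated "results" config).
configs = get_dataset_config_names(REPO)
print(len(configs), "configs, e.g.", configs[:5])

# Load the aggregated results; per the summary above, the "latest"
# split always points to the most recent run.
results = load_dataset(REPO, "results", split="latest")
print(results[0])  # one aggregation record; exact fields depend on the harness version

# A timestamped split name from the metadata above (e.g.
# "2023_10_23T02_23_34.396726") can be substituted for "latest" to pin one run.
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")
print(winogrande)
```

This mirrors the `load_dataset` call shown in the summary, just pointed at the aggregated `results` config instead of a single task.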
open-llm-leaderboard/details_TheBloke__robin-33B-v2-fp16
open-llm-leaderboard
2023-08-27T12:34:03Z
201
0
[ "region:us" ]
null
2023-08-18T11:28:19Z
--- pretty_name: Evaluation run of TheBloke/robin-33B-v2-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/robin-33B-v2-fp16](https://huggingface.co/TheBloke/robin-33B-v2-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__robin-33B-v2-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-31T16:41:32.452325](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__robin-33B-v2-fp16/blob/main/results_2023-07-31T16%3A41%3A32.452325.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5493694357469432,\n\ \ \"acc_stderr\": 0.03462857618448208,\n \"acc_norm\": 0.5533043005336739,\n\ \ \"acc_norm_stderr\": 0.03460642548466365,\n \"mc1\": 0.3574051407588739,\n\ \ \"mc1_stderr\": 0.016776599676729398,\n \"mc2\": 0.5388029530988832,\n\ \ \"mc2_stderr\": 0.014742138833066059\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5947098976109215,\n \"acc_stderr\": 0.014346869060229321,\n\ \ \"acc_norm\": 0.6237201365187713,\n \"acc_norm_stderr\": 0.014157022555407156\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6331408086038638,\n\ \ \"acc_stderr\": 0.004809626723626824,\n \"acc_norm\": 0.8362875921131249,\n\ \ \"acc_norm_stderr\": 0.0036925819391622834\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5037037037037037,\n\ \ \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.5037037037037037,\n\ \ \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.04033565667848319,\n\ \ \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.04033565667848319\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n\ \ \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \ \ \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5207547169811321,\n \"acc_stderr\": 0.030746349975723463,\n\ \ \"acc_norm\": 0.5207547169811321,\n \"acc_norm_stderr\": 0.030746349975723463\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5555555555555556,\n\ \ \"acc_stderr\": 0.041553199555931467,\n \"acc_norm\": 0.5555555555555556,\n\ \ \"acc_norm_stderr\": 0.041553199555931467\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n\ \ \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.48554913294797686,\n\ \ \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.48554913294797686,\n\ \ \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006718,\n\ \ \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006718\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n\ \ \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.451063829787234,\n \"acc_stderr\": 0.032529096196131965,\n\ \ \"acc_norm\": 0.451063829787234,\n \"acc_norm_stderr\": 0.032529096196131965\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3684210526315789,\n\ \ \"acc_stderr\": 0.04537815354939392,\n \"acc_norm\": 0.3684210526315789,\n\ \ \"acc_norm_stderr\": 0.04537815354939392\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.47586206896551725,\n \"acc_stderr\": 0.0416180850350153,\n\ \ \"acc_norm\": 0.47586206896551725,\n \"acc_norm_stderr\": 0.0416180850350153\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.328042328042328,\n \"acc_stderr\": 0.024180497164376896,\n \"\ acc_norm\": 0.328042328042328,\n \"acc_norm_stderr\": 0.024180497164376896\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\ \ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\ \ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6161290322580645,\n\ \ \"acc_stderr\": 0.027666182075539638,\n \"acc_norm\": 0.6161290322580645,\n\ \ \"acc_norm_stderr\": 0.027666182075539638\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.37438423645320196,\n \"acc_stderr\": 0.03405155380561953,\n\ \ \"acc_norm\": 0.37438423645320196,\n \"acc_norm_stderr\": 0.03405155380561953\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\"\ : 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885416,\n\ \ \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885416\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7222222222222222,\n \"acc_stderr\": 0.03191178226713547,\n \"\ acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.03191178226713547\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ 
\ \"acc\": 0.772020725388601,\n \"acc_stderr\": 0.030276909945178267,\n\ \ \"acc_norm\": 0.772020725388601,\n \"acc_norm_stderr\": 0.030276909945178267\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5025641025641026,\n \"acc_stderr\": 0.025350672979412202,\n\ \ \"acc_norm\": 0.5025641025641026,\n \"acc_norm_stderr\": 0.025350672979412202\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \ \ \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5630252100840336,\n \"acc_stderr\": 0.03221943636566196,\n \ \ \"acc_norm\": 0.5630252100840336,\n \"acc_norm_stderr\": 0.03221943636566196\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"\ acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7339449541284404,\n \"acc_stderr\": 0.018946022322225607,\n \"\ acc_norm\": 0.7339449541284404,\n \"acc_norm_stderr\": 0.018946022322225607\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4212962962962963,\n \"acc_stderr\": 0.03367462138896078,\n \"\ acc_norm\": 0.4212962962962963,\n \"acc_norm_stderr\": 0.03367462138896078\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7598039215686274,\n \"acc_stderr\": 0.02998373305591361,\n \"\ acc_norm\": 0.7598039215686274,\n \"acc_norm_stderr\": 0.02998373305591361\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7763713080168776,\n \"acc_stderr\": 0.027123298205229966,\n \ \ \"acc_norm\": 0.7763713080168776,\n \"acc_norm_stderr\": 0.027123298205229966\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6188340807174888,\n\ \ \"acc_stderr\": 0.03259625118416827,\n \"acc_norm\": 0.6188340807174888,\n\ \ \"acc_norm_stderr\": 0.03259625118416827\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6106870229007634,\n \"acc_stderr\": 0.04276486542814591,\n\ \ \"acc_norm\": 0.6106870229007634,\n \"acc_norm_stderr\": 0.04276486542814591\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7024793388429752,\n \"acc_stderr\": 0.04173349148083499,\n \"\ acc_norm\": 0.7024793388429752,\n \"acc_norm_stderr\": 0.04173349148083499\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6666666666666666,\n\ \ \"acc_stderr\": 0.04557239513497751,\n \"acc_norm\": 0.6666666666666666,\n\ \ \"acc_norm_stderr\": 0.04557239513497751\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.036230899157241474,\n\ \ \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.036230899157241474\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3392857142857143,\n\ \ \"acc_stderr\": 0.04493949068613539,\n \"acc_norm\": 0.3392857142857143,\n\ \ \"acc_norm_stderr\": 0.04493949068613539\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6310679611650486,\n \"acc_stderr\": 0.0477761518115674,\n\ \ \"acc_norm\": 0.6310679611650486,\n \"acc_norm_stderr\": 0.0477761518115674\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n\ \ \"acc_stderr\": 0.023902325549560396,\n \"acc_norm\": 0.8418803418803419,\n\ \ 
\"acc_norm_stderr\": 0.023902325549560396\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \ \ \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7305236270753512,\n\ \ \"acc_stderr\": 0.01586624307321506,\n \"acc_norm\": 0.7305236270753512,\n\ \ \"acc_norm_stderr\": 0.01586624307321506\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.02642481659400985,\n\ \ \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.02642481659400985\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.26033519553072626,\n\ \ \"acc_stderr\": 0.014676252009319476,\n \"acc_norm\": 0.26033519553072626,\n\ \ \"acc_norm_stderr\": 0.014676252009319476\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.02818059632825929,\n\ \ \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.02818059632825929\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6302250803858521,\n\ \ \"acc_stderr\": 0.027417996705630998,\n \"acc_norm\": 0.6302250803858521,\n\ \ \"acc_norm_stderr\": 0.027417996705630998\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6080246913580247,\n \"acc_stderr\": 0.027163686038271146,\n\ \ \"acc_norm\": 0.6080246913580247,\n \"acc_norm_stderr\": 0.027163686038271146\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.43617021276595747,\n \"acc_stderr\": 0.02958345203628407,\n \ \ \"acc_norm\": 0.43617021276595747,\n \"acc_norm_stderr\": 0.02958345203628407\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41264667535853977,\n\ \ \"acc_stderr\": 0.012573836633799015,\n \"acc_norm\": 0.41264667535853977,\n\ \ \"acc_norm_stderr\": 0.012573836633799015\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5404411764705882,\n \"acc_stderr\": 0.03027332507734575,\n\ \ \"acc_norm\": 0.5404411764705882,\n \"acc_norm_stderr\": 0.03027332507734575\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5571895424836601,\n \"acc_stderr\": 0.020095083154577347,\n \ \ \"acc_norm\": 0.5571895424836601,\n \"acc_norm_stderr\": 0.020095083154577347\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n\ \ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \ \ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.031680911612338825,\n\ \ \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.031680911612338825\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7661691542288557,\n\ \ \"acc_stderr\": 0.02992941540834839,\n \"acc_norm\": 0.7661691542288557,\n\ \ \"acc_norm_stderr\": 0.02992941540834839\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \ \ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n\ \ \"acc_stderr\": 0.03889951252827217,\n \"acc_norm\": 0.4819277108433735,\n\ \ \"acc_norm_stderr\": 0.03889951252827217\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7602339181286549,\n \"acc_stderr\": 0.032744852119469564,\n\ \ \"acc_norm\": 0.7602339181286549,\n \"acc_norm_stderr\": 0.032744852119469564\n\ \ 
},\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3574051407588739,\n\ \ \"mc1_stderr\": 0.016776599676729398,\n \"mc2\": 0.5388029530988832,\n\ \ \"mc2_stderr\": 0.014742138833066059\n }\n}\n```" repo_url: https://huggingface.co/TheBloke/robin-33B-v2-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|arc:challenge|25_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hellaswag|10_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:41:32.452325.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:41:32.452325.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:41:32.452325.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:41:32.452325.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:41:32.452325.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:41:32.452325.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T16_41_32.452325 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T16:41:32.452325.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T16:41:32.452325.parquet' - config_name: results data_files: - split: 2023_07_31T16_41_32.452325 path: - results_2023-07-31T16:41:32.452325.parquet - split: latest path: - results_2023-07-31T16:41:32.452325.parquet
---

# Dataset Card for Evaluation run of TheBloke/robin-33B-v2-fp16

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/TheBloke/robin-33B-v2-fp16
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [TheBloke/robin-33B-v2-fp16](https://huggingface.co/TheBloke/robin-33B-v2-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
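For example, the aggregated scores can be read directly from that "results" configuration. A minimal sketch (the repository id is the one used in the loading example below; the config and split names are taken from the listing above):

```python
from datasets import load_dataset

# "results" holds the aggregated metrics; the "latest" split always mirrors
# the most recent timestamped run.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__robin-33B-v2-fp16",
    "results",
    split="latest",
)
print(results[0])  # a single row with the aggregated scores for the run
```

The per-sample details of any individual task are loaded the same way, swapping in that task's config name (for instance `harness_hendrycksTest_world_religions_5` from the list above).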
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__robin-33B-v2-fp16", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-31T16:41:32.452325](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__robin-33B-v2-fp16/blob/main/results_2023-07-31T16%3A41%3A32.452325.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5493694357469432, "acc_stderr": 0.03462857618448208, "acc_norm": 0.5533043005336739, "acc_norm_stderr": 0.03460642548466365, "mc1": 0.3574051407588739, "mc1_stderr": 0.016776599676729398, "mc2": 0.5388029530988832, "mc2_stderr": 0.014742138833066059 }, "harness|arc:challenge|25": { "acc": 0.5947098976109215, "acc_stderr": 0.014346869060229321, "acc_norm": 0.6237201365187713, "acc_norm_stderr": 0.014157022555407156 }, "harness|hellaswag|10": { "acc": 0.6331408086038638, "acc_stderr": 0.004809626723626824, "acc_norm": 0.8362875921131249, "acc_norm_stderr": 0.0036925819391622834 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5037037037037037, "acc_stderr": 0.04319223625811331, "acc_norm": 0.5037037037037037, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.04033565667848319, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.04033565667848319 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5207547169811321, "acc_stderr": 0.030746349975723463, "acc_norm": 0.5207547169811321, "acc_norm_stderr": 0.030746349975723463 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5555555555555556, "acc_stderr": 0.041553199555931467, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.041553199555931467 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.48554913294797686, "acc_stderr": 0.03810871630454764, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006718, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006718 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.451063829787234, "acc_stderr": 0.032529096196131965, "acc_norm": 0.451063829787234, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3684210526315789, "acc_stderr": 
0.04537815354939392, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.04537815354939392 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.0416180850350153, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.328042328042328, "acc_stderr": 0.024180497164376896, "acc_norm": 0.328042328042328, "acc_norm_stderr": 0.024180497164376896 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6161290322580645, "acc_stderr": 0.027666182075539638, "acc_norm": 0.6161290322580645, "acc_norm_stderr": 0.027666182075539638 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.37438423645320196, "acc_stderr": 0.03405155380561953, "acc_norm": 0.37438423645320196, "acc_norm_stderr": 0.03405155380561953 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885416, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885416 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.03191178226713547, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.03191178226713547 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.772020725388601, "acc_stderr": 0.030276909945178267, "acc_norm": 0.772020725388601, "acc_norm_stderr": 0.030276909945178267 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5025641025641026, "acc_stderr": 0.025350672979412202, "acc_norm": 0.5025641025641026, "acc_norm_stderr": 0.025350672979412202 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5630252100840336, "acc_stderr": 0.03221943636566196, "acc_norm": 0.5630252100840336, "acc_norm_stderr": 0.03221943636566196 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7339449541284404, "acc_stderr": 0.018946022322225607, "acc_norm": 0.7339449541284404, "acc_norm_stderr": 0.018946022322225607 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896078, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896078 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7598039215686274, "acc_stderr": 0.02998373305591361, "acc_norm": 0.7598039215686274, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7763713080168776, "acc_stderr": 0.027123298205229966, "acc_norm": 0.7763713080168776, "acc_norm_stderr": 0.027123298205229966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6188340807174888, "acc_stderr": 0.03259625118416827, 
"acc_norm": 0.6188340807174888, "acc_norm_stderr": 0.03259625118416827 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6106870229007634, "acc_stderr": 0.04276486542814591, "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.04276486542814591 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7024793388429752, "acc_stderr": 0.04173349148083499, "acc_norm": 0.7024793388429752, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04557239513497751, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04557239513497751 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.036230899157241474, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.036230899157241474 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3392857142857143, "acc_stderr": 0.04493949068613539, "acc_norm": 0.3392857142857143, "acc_norm_stderr": 0.04493949068613539 }, "harness|hendrycksTest-management|5": { "acc": 0.6310679611650486, "acc_stderr": 0.0477761518115674, "acc_norm": 0.6310679611650486, "acc_norm_stderr": 0.0477761518115674 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.023902325549560396, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.023902325549560396 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7305236270753512, "acc_stderr": 0.01586624307321506, "acc_norm": 0.7305236270753512, "acc_norm_stderr": 0.01586624307321506 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5953757225433526, "acc_stderr": 0.02642481659400985, "acc_norm": 0.5953757225433526, "acc_norm_stderr": 0.02642481659400985 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.26033519553072626, "acc_stderr": 0.014676252009319476, "acc_norm": 0.26033519553072626, "acc_norm_stderr": 0.014676252009319476 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5882352941176471, "acc_stderr": 0.02818059632825929, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.02818059632825929 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6302250803858521, "acc_stderr": 0.027417996705630998, "acc_norm": 0.6302250803858521, "acc_norm_stderr": 0.027417996705630998 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6080246913580247, "acc_stderr": 0.027163686038271146, "acc_norm": 0.6080246913580247, "acc_norm_stderr": 0.027163686038271146 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.43617021276595747, "acc_stderr": 0.02958345203628407, "acc_norm": 0.43617021276595747, "acc_norm_stderr": 0.02958345203628407 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41264667535853977, "acc_stderr": 0.012573836633799015, "acc_norm": 0.41264667535853977, "acc_norm_stderr": 0.012573836633799015 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5404411764705882, "acc_stderr": 0.03027332507734575, "acc_norm": 0.5404411764705882, "acc_norm_stderr": 0.03027332507734575 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5571895424836601, "acc_stderr": 0.020095083154577347, "acc_norm": 0.5571895424836601, "acc_norm_stderr": 0.020095083154577347 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5714285714285714, 
"acc_stderr": 0.031680911612338825, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.031680911612338825 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7661691542288557, "acc_stderr": 0.02992941540834839, "acc_norm": 0.7661691542288557, "acc_norm_stderr": 0.02992941540834839 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.03889951252827217, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.03889951252827217 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.032744852119469564, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.032744852119469564 }, "harness|truthfulqa:mc|0": { "mc1": 0.3574051407588739, "mc1_stderr": 0.016776599676729398, "mc2": 0.5388029530988832, "mc2_stderr": 0.014742138833066059 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_TheBloke__robin-13B-v2-fp16
open-llm-leaderboard
2023-08-27T12:34:06Z
201
0
[ "region:us" ]
null
2023-08-18T11:28:36Z
--- pretty_name: Evaluation run of TheBloke/robin-13B-v2-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/robin-13B-v2-fp16](https://huggingface.co/TheBloke/robin-13B-v2-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__robin-13B-v2-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-31T15:48:06.598529](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__robin-13B-v2-fp16/blob/main/results_2023-07-31T15%3A48%3A06.598529.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.49056004249413854,\n\ \ \"acc_stderr\": 0.034895228964178376,\n \"acc_norm\": 0.49452555601900244,\n\ \ \"acc_norm_stderr\": 0.03487806793899599,\n \"mc1\": 0.34149326805385555,\n\ \ \"mc1_stderr\": 0.016600688619950826,\n \"mc2\": 0.5063100731922137,\n\ \ \"mc2_stderr\": 0.014760623429029368\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5401023890784983,\n \"acc_stderr\": 0.01456431885692485,\n\ \ \"acc_norm\": 0.5648464163822525,\n \"acc_norm_stderr\": 0.014487986197186045\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5945030870344553,\n\ \ \"acc_stderr\": 0.004899845087183104,\n \"acc_norm\": 0.8037243576976698,\n\ \ \"acc_norm_stderr\": 0.003963677261161229\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4666666666666667,\n\ \ \"acc_stderr\": 0.043097329010363554,\n \"acc_norm\": 0.4666666666666667,\n\ \ \"acc_norm_stderr\": 0.043097329010363554\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.4868421052631579,\n \"acc_stderr\": 0.04067533136309173,\n\ \ \"acc_norm\": 0.4868421052631579,\n \"acc_norm_stderr\": 0.04067533136309173\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.45,\n\ \ \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\"\ : 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"\ acc\": 0.4679245283018868,\n \"acc_stderr\": 0.03070948699255655,\n \ \ \"acc_norm\": 0.4679245283018868,\n \"acc_norm_stderr\": 0.03070948699255655\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4722222222222222,\n\ \ \"acc_stderr\": 0.04174752578923185,\n \"acc_norm\": 0.4722222222222222,\n\ \ \"acc_norm_stderr\": 0.04174752578923185\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n\ \ \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117317,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117317\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.44508670520231214,\n\ \ \"acc_stderr\": 0.03789401760283646,\n \"acc_norm\": 0.44508670520231214,\n\ \ \"acc_norm_stderr\": 0.03789401760283646\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.0379328118530781,\n\ \ \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.0379328118530781\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n\ \ \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.03202563076101735,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.03202563076101735\n },\n\ \ \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.30701754385964913,\n\ \ \"acc_stderr\": 0.04339138322579861,\n \"acc_norm\": 0.30701754385964913,\n\ \ \"acc_norm_stderr\": 0.04339138322579861\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4068965517241379,\n \"acc_stderr\": 0.04093793981266237,\n\ \ \"acc_norm\": 0.4068965517241379,\n \"acc_norm_stderr\": 0.04093793981266237\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.25925925925925924,\n \"acc_stderr\": 0.02256989707491841,\n \"\ acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02256989707491841\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\ \ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\ \ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.49032258064516127,\n\ \ \"acc_stderr\": 0.028438677998909558,\n \"acc_norm\": 0.49032258064516127,\n\ \ \"acc_norm_stderr\": 0.028438677998909558\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.32019704433497537,\n \"acc_stderr\": 0.032826493853041504,\n\ \ \"acc_norm\": 0.32019704433497537,\n \"acc_norm_stderr\": 0.032826493853041504\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\"\ : 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6303030303030303,\n \"acc_stderr\": 0.037694303145125674,\n\ \ \"acc_norm\": 0.6303030303030303,\n \"acc_norm_stderr\": 0.037694303145125674\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.5606060606060606,\n \"acc_stderr\": 0.03536085947529479,\n \"\ acc_norm\": 0.5606060606060606,\n \"acc_norm_stderr\": 0.03536085947529479\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 
0.6683937823834197,\n \"acc_stderr\": 0.03397636541089118,\n\ \ \"acc_norm\": 0.6683937823834197,\n \"acc_norm_stderr\": 0.03397636541089118\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.44871794871794873,\n \"acc_stderr\": 0.025217315184846482,\n\ \ \"acc_norm\": 0.44871794871794873,\n \"acc_norm_stderr\": 0.025217315184846482\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.23333333333333334,\n \"acc_stderr\": 0.02578787422095932,\n \ \ \"acc_norm\": 0.23333333333333334,\n \"acc_norm_stderr\": 0.02578787422095932\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.0322529423239964,\n \ \ \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.0322529423239964\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"\ acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.6605504587155964,\n \"acc_stderr\": 0.02030210934266235,\n \"\ acc_norm\": 0.6605504587155964,\n \"acc_norm_stderr\": 0.02030210934266235\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.30092592592592593,\n \"acc_stderr\": 0.03128039084329882,\n \"\ acc_norm\": 0.30092592592592593,\n \"acc_norm_stderr\": 0.03128039084329882\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.6274509803921569,\n \"acc_stderr\": 0.03393388584958404,\n \"\ acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.03393388584958404\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7215189873417721,\n \"acc_stderr\": 0.029178682304842544,\n \ \ \"acc_norm\": 0.7215189873417721,\n \"acc_norm_stderr\": 0.029178682304842544\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5695067264573991,\n\ \ \"acc_stderr\": 0.033231973029429394,\n \"acc_norm\": 0.5695067264573991,\n\ \ \"acc_norm_stderr\": 0.033231973029429394\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6106870229007634,\n \"acc_stderr\": 0.04276486542814591,\n\ \ \"acc_norm\": 0.6106870229007634,\n \"acc_norm_stderr\": 0.04276486542814591\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7024793388429752,\n \"acc_stderr\": 0.04173349148083499,\n \"\ acc_norm\": 0.7024793388429752,\n \"acc_norm_stderr\": 0.04173349148083499\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5740740740740741,\n\ \ \"acc_stderr\": 0.0478034362693679,\n \"acc_norm\": 0.5740740740740741,\n\ \ \"acc_norm_stderr\": 0.0478034362693679\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5828220858895705,\n \"acc_stderr\": 0.03874102859818081,\n\ \ \"acc_norm\": 0.5828220858895705,\n \"acc_norm_stderr\": 0.03874102859818081\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n\ \ \"acc_stderr\": 0.04745033255489122,\n \"acc_norm\": 0.5089285714285714,\n\ \ \"acc_norm_stderr\": 0.04745033255489122\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6407766990291263,\n \"acc_stderr\": 0.047504583990416946,\n\ \ \"acc_norm\": 0.6407766990291263,\n \"acc_norm_stderr\": 0.047504583990416946\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7521367521367521,\n\ \ \"acc_stderr\": 0.0282863240755644,\n \"acc_norm\": 0.7521367521367521,\n\ \ 
\"acc_norm_stderr\": 0.0282863240755644\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \ \ \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6883780332056194,\n\ \ \"acc_stderr\": 0.016562433867284176,\n \"acc_norm\": 0.6883780332056194,\n\ \ \"acc_norm_stderr\": 0.016562433867284176\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.026919095102908273,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.026919095102908273\n \ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25027932960893856,\n\ \ \"acc_stderr\": 0.01448750085285041,\n \"acc_norm\": 0.25027932960893856,\n\ \ \"acc_norm_stderr\": 0.01448750085285041\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5065359477124183,\n \"acc_stderr\": 0.028627470550556047,\n\ \ \"acc_norm\": 0.5065359477124183,\n \"acc_norm_stderr\": 0.028627470550556047\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5337620578778135,\n\ \ \"acc_stderr\": 0.028333277109562786,\n \"acc_norm\": 0.5337620578778135,\n\ \ \"acc_norm_stderr\": 0.028333277109562786\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5524691358024691,\n \"acc_stderr\": 0.02766713856942271,\n\ \ \"acc_norm\": 0.5524691358024691,\n \"acc_norm_stderr\": 0.02766713856942271\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.37943262411347517,\n \"acc_stderr\": 0.028947338851614105,\n \ \ \"acc_norm\": 0.37943262411347517,\n \"acc_norm_stderr\": 0.028947338851614105\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4211212516297262,\n\ \ \"acc_stderr\": 0.012610325733489903,\n \"acc_norm\": 0.4211212516297262,\n\ \ \"acc_norm_stderr\": 0.012610325733489903\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5147058823529411,\n \"acc_stderr\": 0.03035969707904612,\n\ \ \"acc_norm\": 0.5147058823529411,\n \"acc_norm_stderr\": 0.03035969707904612\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.48366013071895425,\n \"acc_stderr\": 0.020217030653186453,\n \ \ \"acc_norm\": 0.48366013071895425,\n \"acc_norm_stderr\": 0.020217030653186453\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5636363636363636,\n\ \ \"acc_stderr\": 0.04750185058907296,\n \"acc_norm\": 0.5636363636363636,\n\ \ \"acc_norm_stderr\": 0.04750185058907296\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5551020408163265,\n \"acc_stderr\": 0.031814251181977865,\n\ \ \"acc_norm\": 0.5551020408163265,\n \"acc_norm_stderr\": 0.031814251181977865\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6567164179104478,\n\ \ \"acc_stderr\": 0.03357379665433431,\n \"acc_norm\": 0.6567164179104478,\n\ \ \"acc_norm_stderr\": 0.03357379665433431\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932264,\n \ \ \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932264\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n\ \ \"acc_stderr\": 0.038786267710023595,\n \"acc_norm\": 0.4578313253012048,\n\ \ \"acc_norm_stderr\": 0.038786267710023595\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.695906432748538,\n \"acc_stderr\": 0.0352821125824523,\n\ \ \"acc_norm\": 0.695906432748538,\n \"acc_norm_stderr\": 0.0352821125824523\n\ \ 
},\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.34149326805385555,\n\ \ \"mc1_stderr\": 0.016600688619950826,\n \"mc2\": 0.5063100731922137,\n\ \ \"mc2_stderr\": 0.014760623429029368\n }\n}\n```" repo_url: https://huggingface.co/TheBloke/robin-13B-v2-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|arc:challenge|25_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hellaswag|10_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:48:06.598529.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:48:06.598529.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:48:06.598529.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:48:06.598529.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:48:06.598529.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:48:06.598529.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T15_48_06.598529 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T15:48:06.598529.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T15:48:06.598529.parquet' - config_name: results data_files: - split: 2023_07_31T15_48_06.598529 path: - results_2023-07-31T15:48:06.598529.parquet - split: latest path: - results_2023-07-31T15:48:06.598529.parquet
---

# Dataset Card for Evaluation run of TheBloke/robin-13B-v2-fp16

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/TheBloke/robin-13B-v2-fp16
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [TheBloke/robin-13B-v2-fp16](https://huggingface.co/TheBloke/robin-13B-v2-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
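Since the aggregated metrics live in the dedicated "results" configuration rather than in the per-task configurations, a minimal sketch for pulling just those aggregates is shown below. It assumes only the Hugging Face `datasets` library and the configuration/split names declared in this card; the exact column layout of the aggregated table is not guaranteed here.

```python
from datasets import load_dataset

# Load the run-level aggregates: the "results" configuration stores them,
# and the "latest" split points at the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__robin-13B-v2-fp16",
    "results",
    split="latest",
)

# Inspect whatever columns the aggregated table exposes.
print(results.column_names)
print(results[0])
```

The same pattern works for any of the per-task configurations listed above; only the configuration name changes, as in the per-task example that follows.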
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset

# Each evaluated task is a configuration; each run is a split within it.
data = load_dataset("open-llm-leaderboard/details_TheBloke__robin-13B-v2-fp16",
	"harness_truthfulqa_mc_0",
	split="train")
```

## Latest results

These are the [latest results from run 2023-07-31T15:48:06.598529](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__robin-13B-v2-fp16/blob/main/results_2023-07-31T15%3A48%3A06.598529.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.49056004249413854, "acc_stderr": 0.034895228964178376, "acc_norm": 0.49452555601900244, "acc_norm_stderr": 0.03487806793899599, "mc1": 0.34149326805385555, "mc1_stderr": 0.016600688619950826, "mc2": 0.5063100731922137, "mc2_stderr": 0.014760623429029368 }, "harness|arc:challenge|25": { "acc": 0.5401023890784983, "acc_stderr": 0.01456431885692485, "acc_norm": 0.5648464163822525, "acc_norm_stderr": 0.014487986197186045 }, "harness|hellaswag|10": { "acc": 0.5945030870344553, "acc_stderr": 0.004899845087183104, "acc_norm": 0.8037243576976698, "acc_norm_stderr": 0.003963677261161229 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4868421052631579, "acc_stderr": 0.04067533136309173, "acc_norm": 0.4868421052631579, "acc_norm_stderr": 0.04067533136309173 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4679245283018868, "acc_stderr": 0.03070948699255655, "acc_norm": 0.4679245283018868, "acc_norm_stderr": 0.03070948699255655 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04174752578923185, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.04174752578923185 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117317, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117317 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.44508670520231214, "acc_stderr": 0.03789401760283646, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.03789401760283646 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.0379328118530781, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.0379328118530781 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4, "acc_stderr": 0.03202563076101735, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101735 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.04339138322579861, "acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.04339138322579861 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4068965517241379, "acc_stderr": 0.04093793981266237, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.04093793981266237 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02256989707491841, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02256989707491841 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.49032258064516127, "acc_stderr": 0.028438677998909558, "acc_norm": 0.49032258064516127, "acc_norm_stderr": 0.028438677998909558 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6303030303030303, "acc_stderr": 0.037694303145125674, "acc_norm": 0.6303030303030303, "acc_norm_stderr": 0.037694303145125674 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5606060606060606, "acc_stderr": 0.03536085947529479, "acc_norm": 0.5606060606060606, "acc_norm_stderr": 0.03536085947529479 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6683937823834197, "acc_stderr": 0.03397636541089118, "acc_norm": 0.6683937823834197, "acc_norm_stderr": 0.03397636541089118 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.44871794871794873, "acc_stderr": 0.025217315184846482, "acc_norm": 0.44871794871794873, "acc_norm_stderr": 0.025217315184846482 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.02578787422095932, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.02578787422095932 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.0322529423239964, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.0322529423239964 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6605504587155964, "acc_stderr": 0.02030210934266235, "acc_norm": 0.6605504587155964, "acc_norm_stderr": 0.02030210934266235 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.30092592592592593, "acc_stderr": 0.03128039084329882, "acc_norm": 0.30092592592592593, "acc_norm_stderr": 0.03128039084329882 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6274509803921569, "acc_stderr": 0.03393388584958404, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.03393388584958404 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7215189873417721, "acc_stderr": 0.029178682304842544, "acc_norm": 0.7215189873417721, "acc_norm_stderr": 0.029178682304842544 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5695067264573991, "acc_stderr": 0.033231973029429394, "acc_norm": 0.5695067264573991, 
"acc_norm_stderr": 0.033231973029429394 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6106870229007634, "acc_stderr": 0.04276486542814591, "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.04276486542814591 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7024793388429752, "acc_stderr": 0.04173349148083499, "acc_norm": 0.7024793388429752, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5740740740740741, "acc_stderr": 0.0478034362693679, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.0478034362693679 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5828220858895705, "acc_stderr": 0.03874102859818081, "acc_norm": 0.5828220858895705, "acc_norm_stderr": 0.03874102859818081 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489122, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489122 }, "harness|hendrycksTest-management|5": { "acc": 0.6407766990291263, "acc_stderr": 0.047504583990416946, "acc_norm": 0.6407766990291263, "acc_norm_stderr": 0.047504583990416946 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7521367521367521, "acc_stderr": 0.0282863240755644, "acc_norm": 0.7521367521367521, "acc_norm_stderr": 0.0282863240755644 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6883780332056194, "acc_stderr": 0.016562433867284176, "acc_norm": 0.6883780332056194, "acc_norm_stderr": 0.016562433867284176 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5, "acc_stderr": 0.026919095102908273, "acc_norm": 0.5, "acc_norm_stderr": 0.026919095102908273 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.25027932960893856, "acc_stderr": 0.01448750085285041, "acc_norm": 0.25027932960893856, "acc_norm_stderr": 0.01448750085285041 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5065359477124183, "acc_stderr": 0.028627470550556047, "acc_norm": 0.5065359477124183, "acc_norm_stderr": 0.028627470550556047 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5337620578778135, "acc_stderr": 0.028333277109562786, "acc_norm": 0.5337620578778135, "acc_norm_stderr": 0.028333277109562786 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5524691358024691, "acc_stderr": 0.02766713856942271, "acc_norm": 0.5524691358024691, "acc_norm_stderr": 0.02766713856942271 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.37943262411347517, "acc_stderr": 0.028947338851614105, "acc_norm": 0.37943262411347517, "acc_norm_stderr": 0.028947338851614105 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4211212516297262, "acc_stderr": 0.012610325733489903, "acc_norm": 0.4211212516297262, "acc_norm_stderr": 0.012610325733489903 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5147058823529411, "acc_stderr": 0.03035969707904612, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.03035969707904612 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.48366013071895425, "acc_stderr": 0.020217030653186453, "acc_norm": 0.48366013071895425, "acc_norm_stderr": 0.020217030653186453 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5636363636363636, "acc_stderr": 0.04750185058907296, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.04750185058907296 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5551020408163265, "acc_stderr": 0.031814251181977865, 
"acc_norm": 0.5551020408163265, "acc_norm_stderr": 0.031814251181977865 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6567164179104478, "acc_stderr": 0.03357379665433431, "acc_norm": 0.6567164179104478, "acc_norm_stderr": 0.03357379665433431 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.04163331998932264, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932264 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.038786267710023595, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.038786267710023595 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.695906432748538, "acc_stderr": 0.0352821125824523, "acc_norm": 0.695906432748538, "acc_norm_stderr": 0.0352821125824523 }, "harness|truthfulqa:mc|0": { "mc1": 0.34149326805385555, "mc1_stderr": 0.016600688619950826, "mc2": 0.5063100731922137, "mc2_stderr": 0.014760623429029368 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_TheBloke__Vicuna-33B-1-3-SuperHOT-8K-fp16
open-llm-leaderboard
2023-08-27T12:34:18Z
201
0
[ "region:us" ]
null
2023-08-18T11:29:45Z
--- pretty_name: Evaluation run of TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__Vicuna-33B-1-3-SuperHOT-8K-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-31T19:04:33.192118](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Vicuna-33B-1-3-SuperHOT-8K-fp16/blob/main/results_2023-07-31T19%3A04%3A33.192118.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2367148405069541,\n\ \ \"acc_stderr\": 0.030958077810881182,\n \"acc_norm\": 0.23838963087978138,\n\ \ \"acc_norm_stderr\": 0.030974710079953026,\n \"mc1\": 0.23378212974296206,\n\ \ \"mc1_stderr\": 0.01481619599193159,\n \"mc2\": 0.4693099566156165,\n\ \ \"mc2_stderr\": 0.01667201792733067\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.21331058020477817,\n \"acc_stderr\": 0.011970971742326334,\n\ \ \"acc_norm\": 0.25426621160409557,\n \"acc_norm_stderr\": 0.012724999945157744\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.28828918542123083,\n\ \ \"acc_stderr\": 0.00452040633108404,\n \"acc_norm\": 0.3461461860187214,\n\ \ \"acc_norm_stderr\": 0.004747682003491466\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.24444444444444444,\n\ \ \"acc_stderr\": 0.03712537833614865,\n \"acc_norm\": 0.24444444444444444,\n\ \ \"acc_norm_stderr\": 0.03712537833614865\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\ \ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.34,\n\ \ \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \ \ \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.025288394502891373,\n\ \ \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.025288394502891373\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n\ \ \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \ \ 
\"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n\ \ \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \ \ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n\ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n\ \ \"acc_stderr\": 0.04096985139843671,\n \"acc_norm\": 0.2543859649122807,\n\ \ \"acc_norm_stderr\": 0.04096985139843671\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n\ \ \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2275132275132275,\n \"acc_stderr\": 0.02159126940782378,\n \"\ acc_norm\": 0.2275132275132275,\n \"acc_norm_stderr\": 0.02159126940782378\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.20634920634920634,\n\ \ \"acc_stderr\": 0.0361960452412425,\n \"acc_norm\": 0.20634920634920634,\n\ \ \"acc_norm_stderr\": 0.0361960452412425\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.2838709677419355,\n \"acc_stderr\": 0.025649381063029254,\n \"\ acc_norm\": 0.2838709677419355,\n \"acc_norm_stderr\": 0.025649381063029254\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.24630541871921183,\n \"acc_stderr\": 0.030315099285617722,\n \"\ acc_norm\": 0.24630541871921183,\n \"acc_norm_stderr\": 0.030315099285617722\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.0347769116216366,\n\ \ \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.0347769116216366\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.18181818181818182,\n \"acc_stderr\": 0.027479603010538797,\n \"\ acc_norm\": 0.18181818181818182,\n \"acc_norm_stderr\": 0.027479603010538797\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860702,\n\ \ \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860702\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.20512820512820512,\n \"acc_stderr\": 0.020473233173551982,\n\ \ \"acc_norm\": 0.20512820512820512,\n \"acc_norm_stderr\": 0.020473233173551982\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.23703703703703705,\n \"acc_stderr\": 0.02592887613276612,\n \ \ \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.02592887613276612\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.027553614467863818,\n\ \ \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.027553614467863818\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.19205298013245034,\n \"acc_stderr\": 0.032162984205936135,\n \"\ acc_norm\": 0.19205298013245034,\n \"acc_norm_stderr\": 0.032162984205936135\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.21467889908256882,\n \"acc_stderr\": 0.01760430414925649,\n \"\ acc_norm\": 0.21467889908256882,\n \"acc_norm_stderr\": 0.01760430414925649\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.2638888888888889,\n \"acc_stderr\": 0.03005820270430985,\n \"\ acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03005820270430985\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n\ \ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n\ \ \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.242152466367713,\n\ \ \"acc_stderr\": 0.028751392398694755,\n \"acc_norm\": 0.242152466367713,\n\ \ \"acc_norm_stderr\": 0.028751392398694755\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070416,\n \"\ acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070416\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.26851851851851855,\n\ \ \"acc_stderr\": 0.04284467968052192,\n \"acc_norm\": 0.26851851851851855,\n\ \ \"acc_norm_stderr\": 0.04284467968052192\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.03291099578615767,\n\ \ \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.03291099578615767\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n\ \ \"acc_stderr\": 0.04364226155841043,\n \"acc_norm\": 0.30357142857142855,\n\ \ \"acc_norm_stderr\": 0.04364226155841043\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23931623931623933,\n\ \ \"acc_stderr\": 
0.027951826808924333,\n \"acc_norm\": 0.23931623931623933,\n\ \ \"acc_norm_stderr\": 0.027951826808924333\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2554278416347382,\n\ \ \"acc_stderr\": 0.015594955384455772,\n \"acc_norm\": 0.2554278416347382,\n\ \ \"acc_norm_stderr\": 0.015594955384455772\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.20520231213872833,\n \"acc_stderr\": 0.021742519835276287,\n\ \ \"acc_norm\": 0.20520231213872833,\n \"acc_norm_stderr\": 0.021742519835276287\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23910614525139665,\n\ \ \"acc_stderr\": 0.014265554192331144,\n \"acc_norm\": 0.23910614525139665,\n\ \ \"acc_norm_stderr\": 0.014265554192331144\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.02355083135199509,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.02355083135199509\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\ \ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\ \ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.2191358024691358,\n \"acc_stderr\": 0.023016705640262203,\n\ \ \"acc_norm\": 0.2191358024691358,\n \"acc_norm_stderr\": 0.023016705640262203\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290392,\n \ \ \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290392\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24771838331160365,\n\ \ \"acc_stderr\": 0.011025499291443738,\n \"acc_norm\": 0.24771838331160365,\n\ \ \"acc_norm_stderr\": 0.011025499291443738\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.21323529411764705,\n \"acc_stderr\": 0.024880971512294275,\n\ \ \"acc_norm\": 0.21323529411764705,\n \"acc_norm_stderr\": 0.024880971512294275\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.2549019607843137,\n \"acc_stderr\": 0.017630827375148383,\n \ \ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.017630827375148383\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2,\n\ \ \"acc_stderr\": 0.03831305140884601,\n \"acc_norm\": 0.2,\n \ \ \"acc_norm_stderr\": 0.03831305140884601\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.19183673469387755,\n \"acc_stderr\": 0.025206963154225378,\n\ \ \"acc_norm\": 0.19183673469387755,\n \"acc_norm_stderr\": 0.025206963154225378\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n\ \ \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n\ \ \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816508,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816508\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n\ \ \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n\ \ \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03188578017686399,\n\ \ 
\"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.03188578017686399\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23378212974296206,\n\ \ \"mc1_stderr\": 0.01481619599193159,\n \"mc2\": 0.4693099566156165,\n\ \ \"mc2_stderr\": 0.01667201792733067\n }\n}\n```" repo_url: https://huggingface.co/TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|arc:challenge|25_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hellaswag|10_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:04:33.192118.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:04:33.192118.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:04:33.192118.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:04:33.192118.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T19:04:33.192118.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T19:04:33.192118.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T19_04_33.192118 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T19:04:33.192118.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T19:04:33.192118.parquet' - config_name: results data_files: - split: 2023_07_31T19_04_33.192118 path: - results_2023-07-31T19:04:33.192118.parquet - split: latest path: - results_2023-07-31T19:04:33.192118.parquet --- # Dataset Card for Evaluation run of TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
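Because the repository exposes one configuration per evaluated task plus the aggregated "results" configuration, it can help to enumerate the available configuration names programmatically before picking one. A minimal sketch, assuming only that the `datasets` library is installed:

```python
from datasets import get_dataset_config_names

# Enumerate the per-task configurations (and the aggregated "results"
# configuration) exposed by this evaluation-details repository.
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_TheBloke__Vicuna-33B-1-3-SuperHOT-8K-fp16"
)
print(len(configs))
print(configs[:5])
```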
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__Vicuna-33B-1-3-SuperHOT-8K-fp16", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-31T19:04:33.192118](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Vicuna-33B-1-3-SuperHOT-8K-fp16/blob/main/results_2023-07-31T19%3A04%3A33.192118.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2367148405069541, "acc_stderr": 0.030958077810881182, "acc_norm": 0.23838963087978138, "acc_norm_stderr": 0.030974710079953026, "mc1": 0.23378212974296206, "mc1_stderr": 0.01481619599193159, "mc2": 0.4693099566156165, "mc2_stderr": 0.01667201792733067 }, "harness|arc:challenge|25": { "acc": 0.21331058020477817, "acc_stderr": 0.011970971742326334, "acc_norm": 0.25426621160409557, "acc_norm_stderr": 0.012724999945157744 }, "harness|hellaswag|10": { "acc": 0.28828918542123083, "acc_stderr": 0.00452040633108404, "acc_norm": 0.3461461860187214, "acc_norm_stderr": 0.004747682003491466 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.24444444444444444, "acc_stderr": 0.03712537833614865, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.03712537833614865 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.025288394502891373, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.025288394502891373 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, 
"acc_stderr": 0.04096985139843671, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.04096985139843671 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2275132275132275, "acc_stderr": 0.02159126940782378, "acc_norm": 0.2275132275132275, "acc_norm_stderr": 0.02159126940782378 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.20634920634920634, "acc_stderr": 0.0361960452412425, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.0361960452412425 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2838709677419355, "acc_stderr": 0.025649381063029254, "acc_norm": 0.2838709677419355, "acc_norm_stderr": 0.025649381063029254 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.24630541871921183, "acc_stderr": 0.030315099285617722, "acc_norm": 0.24630541871921183, "acc_norm_stderr": 0.030315099285617722 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2727272727272727, "acc_stderr": 0.0347769116216366, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.0347769116216366 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.18181818181818182, "acc_stderr": 0.027479603010538797, "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.027479603010538797 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860702, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860702 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20512820512820512, "acc_stderr": 0.020473233173551982, "acc_norm": 0.20512820512820512, "acc_norm_stderr": 0.020473233173551982 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23703703703703705, "acc_stderr": 0.02592887613276612, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.02592887613276612 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.027553614467863818, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.027553614467863818 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.19205298013245034, "acc_stderr": 0.032162984205936135, "acc_norm": 0.19205298013245034, "acc_norm_stderr": 0.032162984205936135 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.21467889908256882, "acc_stderr": 0.01760430414925649, "acc_norm": 0.21467889908256882, "acc_norm_stderr": 0.01760430414925649 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03005820270430985, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03005820270430985 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.242152466367713, "acc_stderr": 0.028751392398694755, 
"acc_norm": 0.242152466367713, "acc_norm_stderr": 0.028751392398694755 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070416, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070416 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.26851851851851855, "acc_stderr": 0.04284467968052192, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.04284467968052192 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.03291099578615767, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.03291099578615767 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841043, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841043 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.23931623931623933, "acc_stderr": 0.027951826808924333, "acc_norm": 0.23931623931623933, "acc_norm_stderr": 0.027951826808924333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2554278416347382, "acc_stderr": 0.015594955384455772, "acc_norm": 0.2554278416347382, "acc_norm_stderr": 0.015594955384455772 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.20520231213872833, "acc_stderr": 0.021742519835276287, "acc_norm": 0.20520231213872833, "acc_norm_stderr": 0.021742519835276287 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23910614525139665, "acc_stderr": 0.014265554192331144, "acc_norm": 0.23910614525139665, "acc_norm_stderr": 0.014265554192331144 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.21568627450980393, "acc_stderr": 0.02355083135199509, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.02355083135199509 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2191358024691358, "acc_stderr": 0.023016705640262203, "acc_norm": 0.2191358024691358, "acc_norm_stderr": 0.023016705640262203 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290392, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290392 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24771838331160365, "acc_stderr": 0.011025499291443738, "acc_norm": 0.24771838331160365, "acc_norm_stderr": 0.011025499291443738 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.21323529411764705, "acc_stderr": 0.024880971512294275, "acc_norm": 0.21323529411764705, "acc_norm_stderr": 0.024880971512294275 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2549019607843137, "acc_stderr": 0.017630827375148383, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.017630827375148383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2, "acc_stderr": 0.03831305140884601, "acc_norm": 0.2, "acc_norm_stderr": 0.03831305140884601 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.19183673469387755, "acc_stderr": 0.025206963154225378, "acc_norm": 0.19183673469387755, "acc_norm_stderr": 0.025206963154225378 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03188578017686399, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03188578017686399 }, "harness|truthfulqa:mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.01481619599193159, "mc2": 0.4693099566156165, "mc2_stderr": 0.01667201792733067 } }
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
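As a complement to the loading snippet near the top of this card, the sketch below shows one way to enumerate the per-task `harness_*` configurations of this details repository and inspect one of them as a pandas DataFrame. It is a minimal illustration rather than part of the original card: it assumes network access to the Hugging Face Hub, an installed `datasets` library, and that the repository exposes the configurations and splits described above (the card's own example uses the `train` split; other details repos publish `latest` and timestamped splits instead).

```python
# Minimal sketch (not part of the original card): list the per-task configurations
# published in this details repo and look at one of them as a pandas DataFrame.
# Assumes the `datasets` library and network access to the Hugging Face Hub.
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_TheBloke__Vicuna-33B-1-3-SuperHOT-8K-fp16"

# Each evaluated task (ARC, HellaSwag, the MMLU subtasks, TruthfulQA, ...) is its own config.
configs = get_dataset_config_names(repo)
print(len(configs), "configurations, e.g.", configs[:3])

# Load the per-example details for one task; "train" follows the card's example above,
# while repos using the newer layout expose "latest" and timestamped splits instead.
details = load_dataset(repo, "harness_truthfulqa_mc_0", split="train")
df = details.to_pandas()
print(df.shape)
print(df.columns.tolist())
```

Converting to pandas is only one option; the loaded `Dataset` object can also be filtered, mapped, or iterated directly with the usual `datasets` API.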
[ -0.7149296998977661, -0.842238187789917, 0.2830698788166046, 0.18142428994178772, -0.1837678998708725, -0.07107671350240707, 0.04729112982749939, -0.24086271226406097, 0.5884573459625244, -0.07106111943721771, -0.5429884195327759, -0.6634626388549805, -0.41520586609840393, 0.22749096155166626, -0.017222121357917786, 0.8283618688583374, -0.18394619226455688, -0.09865589439868927, 0.09865077584981918, -0.028648389503359795, -0.28589102625846863, -0.34755319356918335, -0.4937945604324341, -0.35460853576660156, 0.18049447238445282, 0.4411250948905945, 0.44164296984672546, 0.7890609502792358, 0.712630569934845, 0.29889512062072754, -0.3114585876464844, 0.01486594881862402, -0.17879065871238708, -0.26051807403564453, 0.36704790592193604, -0.3972090780735016, -0.8251488208770752, 0.2621142268180847, 0.7644467949867249, 0.5275285243988037, -0.05423412099480629, 0.29247382283210754, 0.0007103016250766814, 0.5519777536392212, -0.3659895658493042, 0.03616846725344658, -0.28641563653945923, 0.23646283149719238, -0.20306803286075592, -0.298546701669693, -0.27197909355163574, -0.2132357954978943, -0.1686936914920807, -0.8885613083839417, 0.3101802170276642, 0.30460125207901, 1.5888088941574097, -0.16567571461200714, -0.1717703640460968, 0.12184818834066391, -0.12468358874320984, 0.9977180361747742, -0.8602684736251831, 0.3669624924659729, 0.7585542798042297, 0.14272308349609375, -0.17174790799617767, -0.5653929710388184, -0.6449155211448669, 0.07145431637763977, -0.3359992504119873, 0.3912389278411865, -0.0453338697552681, -0.17801852524280548, 0.3485056757926941, 0.692325234413147, -0.6543081402778625, 0.17777791619300842, -0.6389968395233154, -0.18240800499916077, 1.0765737295150757, 0.37721601128578186, 0.09108729660511017, -0.37571266293525696, -0.6949372887611389, -0.6481923460960388, -0.4308735728263855, 0.2308870404958725, 0.40372732281684875, 0.35154616832733154, -0.3934599459171295, 0.6929126977920532, -0.3741399049758911, 0.5473663210868835, 0.40244588255882263, 0.030128367245197296, 0.8320550322532654, -0.7251881957054138, -0.5382210612297058, -0.09772563725709915, 1.1191154718399048, 0.6124723553657532, 0.05160043388605118, 0.22247013449668884, 0.03362692892551422, -0.10747680068016052, 0.002419104566797614, -0.8725219368934631, -0.2820979654788971, 0.20443610846996307, -0.33395108580589294, -0.4639745354652405, 0.33910632133483887, -0.8611220121383667, 0.1353648453950882, -0.02748560719192028, 0.41717830300331116, -0.5285038352012634, -0.1362236589193344, 0.21125264465808868, -0.42981669306755066, 0.8204963207244873, -0.14926479756832123, -0.809965193271637, 0.37322503328323364, 0.5250129699707031, 0.7878928184509277, -0.05985189974308014, -0.4760243594646454, -0.11048588901758194, -0.09984056651592255, -0.3144257068634033, 0.5477942824363708, -0.25301921367645264, -0.41441136598587036, -0.30380919575691223, 0.3074098229408264, -0.24472156167030334, -0.37695804238319397, 0.6706762909889221, -0.22655850648880005, 0.18276292085647583, -0.43681228160858154, -0.6676673889160156, 0.17899608612060547, 0.39181411266326904, -0.4131909906864166, 1.298533320426941, 0.25435522198677063, -0.8049209713935852, 0.43252629041671753, -0.5811256170272827, -0.11513342708349228, 0.021395739167928696, -0.06441312283277512, -0.7864506840705872, -0.23031972348690033, 0.17237286269664764, 0.43001341819763184, -0.1363758146762848, -0.12436501681804657, -0.3713037073612213, -0.36949434876441956, 0.3141143023967743, -0.15101498365402222, 1.2313718795776367, -0.047949112951755524, -0.7741101980209351, 
-0.09661291539669037, -1.2098350524902344, 0.27992209792137146, 0.22557303309440613, -0.3242416977882385, -0.18720215559005737, -0.5403380990028381, -0.02481936477124691, 0.1712125837802887, 0.27532270550727844, -0.8161560297012329, 0.2971074879169464, -0.3373661935329437, 0.15332084894180298, 1.2651774883270264, 0.05587387830018997, 0.14379781484603882, -0.5396950840950012, 0.5442618727684021, 0.18133696913719177, 0.23584716022014618, 0.3744010031223297, -0.5857868790626526, -0.8064919114112854, -0.5156023502349854, -0.03484572470188141, 0.5967739820480347, -0.23576034605503082, 1.1097872257232666, 0.08935859054327011, -0.9200080037117004, -0.48140501976013184, -0.13030046224594116, 0.5380862355232239, 0.7440884709358215, 0.6385418772697449, -0.044464047998189926, -0.6441653966903687, -1.0916271209716797, -0.30044326186180115, -0.1624251753091812, 0.13910531997680664, 0.20524302124977112, 1.0262970924377441, -0.24522888660430908, 0.577806830406189, -0.998802125453949, -0.21760058403015137, 0.13091795146465302, -0.07395920902490616, 0.7631899118423462, 0.7560451626777649, 0.5808528065681458, -0.6730184555053711, -0.5086711645126343, 0.19863347709178925, -0.8993163108825684, -0.07621347904205322, 0.09295685589313507, -0.3349733054637909, 0.14672984182834625, 0.16569304466247559, -0.7211501598358154, 0.5366480350494385, 0.2048443853855133, -1.0601831674575806, 1.0911834239959717, -0.3394668400287628, 0.5756351351737976, -0.9971240758895874, 0.1611935943365097, -0.039561983197927475, 0.029258795082569122, -0.4961983561515808, 0.10456372052431107, 0.09901265799999237, 0.46794643998146057, -0.5365638732910156, 0.8298130035400391, -0.6940908432006836, -0.05847139284014702, 0.45618900656700134, 0.10942300409078598, -0.09857311099767685, 0.336046427488327, -0.19294564425945282, 0.7712103128433228, 0.7473486065864563, -0.4572238326072693, 0.5585528016090393, 0.39334186911582947, -0.2192980945110321, 0.7252299189567566, -0.4808647632598877, -0.2920554280281067, 0.3048197329044342, -0.023114614188671112, -0.829167366027832, -0.4741773307323456, 0.028871547430753708, -0.6190437078475952, -0.10744606703519821, 0.37601760029792786, -0.30422553420066833, -0.7842845916748047, -0.9386295676231384, 0.3324699401855469, 0.6546069979667664, -0.4065735936164856, -0.18664541840553284, 0.049230851233005524, 0.10412092506885529, -0.7899634838104248, -0.8637230396270752, -0.4886380136013031, -0.20445632934570312, -0.656602144241333, 0.3239997923374176, -0.2717951238155365, -0.2667314112186432, -0.0974213033914566, -0.23427367210388184, -0.30223676562309265, -0.028406323865056038, 0.13326041400432587, 0.7204233407974243, -0.3839768171310425, -0.30068618059158325, -0.26747986674308777, -0.19312390685081482, 0.1977626383304596, -0.08830145746469498, 0.3947121202945709, -0.4656555950641632, -0.34488385915756226, -0.4797306954860687, -0.03198084607720375, 0.7347866892814636, -0.05734549090266228, 0.7596924901008606, 0.4364554286003113, -0.3085134029388428, 0.0011992603540420532, -0.2914625108242035, -0.28838205337524414, -0.577122151851654, 0.26603999733924866, -0.5159029364585876, -1.0428860187530518, 0.8178313374519348, 0.5955085754394531, 0.08227448165416718, 1.1405516862869263, 0.6053056716918945, -0.30801457166671753, 0.9905562996864319, 0.037221938371658325, 0.35135015845298767, 0.43962520360946655, -0.7644215226173401, 0.08950354158878326, -0.9349055290222168, -0.32957923412323, -0.5528964400291443, -0.48005253076553345, -0.7107603549957275, -0.10295165330171585, 0.27196866273880005, 0.17247942090034485, 
-0.6862684488296509, 0.638017475605011, -0.8180072903633118, 0.5897171497344971, 0.5952768921852112, 0.2886359393596649, 0.16357724368572235, -0.16600173711776733, -0.3970142602920532, -0.14564557373523712, -0.4842959940433502, -0.20233064889907837, 1.2143425941467285, 0.2563801109790802, 0.6864680051803589, 0.06619026511907578, 0.9059193134307861, 0.07176429778337479, -0.05304618924856186, -0.5740386247634888, 0.6802683472633362, 0.1098877564072609, -0.8606484532356262, -0.44464951753616333, -0.5181546211242676, -1.0665401220321655, 0.3994949460029602, -0.15479527413845062, -0.8480910658836365, 0.13723543286323547, 0.04213891550898552, -0.22648932039737701, 0.492844820022583, -0.5390515327453613, 0.855570912361145, -0.12837311625480652, -0.5306867361068726, 0.09950102865695953, -0.8322869539260864, 0.445218563079834, 0.22790437936782837, 0.2597276270389557, 0.0633208230137825, 0.2508108913898468, 1.1694258451461792, -0.8561300039291382, 0.40316733717918396, 0.06286504864692688, 0.049206048250198364, 0.33357805013656616, -0.1462065577507019, 0.4865851104259491, 0.0746721476316452, -0.005004906095564365, -0.10122563689947128, 0.2959105372428894, -0.8618696928024292, -0.10621101409196854, 0.9199504256248474, -0.9538489580154419, -0.5737957954406738, -0.864432692527771, -0.5297368168830872, 0.08059824258089066, 0.5719960331916809, 0.43825849890708923, 0.5766974687576294, 0.01972769945859909, 0.4520810544490814, 0.8229307532310486, -0.11782944947481155, 0.5872536301612854, 0.2438029944896698, 0.05853408947587013, -0.6584532856941223, 0.8710499405860901, 0.13791391253471375, 0.37093910574913025, 0.2993657886981964, 0.40699654817581177, -0.5477843880653381, -0.21888433396816254, -0.21849389374256134, 0.506051778793335, -0.6071375012397766, -0.282551109790802, -0.33559009432792664, -0.36883726716041565, -0.767137885093689, -0.6427392959594727, -0.3545338213443756, -0.49829956889152527, -0.4625775218009949, -0.5235620737075806, 0.5832105875015259, 0.4475792348384857, -0.40229618549346924, 0.038419824093580246, -0.5150110721588135, 0.2790389657020569, 0.33822354674339294, 0.523023247718811, -0.388086199760437, -0.5963605046272278, 0.02718944102525711, -0.15100593864917755, -0.5608425736427307, -0.9503357410430908, 0.34249556064605713, -0.06374014914035797, 0.5128255486488342, 0.5640788078308105, 0.06229046359658241, 0.8800956010818481, -0.1981039196252823, 1.0631978511810303, 0.36088821291923523, -0.7701706290245056, 0.7531078457832336, -0.3432537615299225, 0.20470090210437775, 0.6719796061515808, 0.20345287024974823, -0.1542978435754776, -0.7252660393714905, -1.3461064100265503, -0.8091438412666321, 0.641372561454773, 0.3854798674583435, -0.2613675594329834, 0.04327382519841194, 0.14997538924217224, -0.30833208560943604, -0.19096708297729492, -0.6808149814605713, -0.8722648024559021, -0.15274567902088165, -0.47607800364494324, 0.09681853652000427, 0.07211066037416458, -0.3810351490974426, -0.8464841246604919, 0.9330506324768066, 0.028439544141292572, 0.5937979817390442, 0.5067349076271057, 0.04766099900007248, 0.06564096361398697, 0.460382878780365, 0.897898256778717, 0.7452642917633057, -0.4930325448513031, 0.4099659323692322, 0.3776765465736389, -1.048531413078308, 0.46219930052757263, 0.29051095247268677, -0.08263162523508072, -0.01665453426539898, 0.4937589466571808, 0.45218053460121155, 0.07482385635375977, -0.2096109241247177, 0.6089669466018677, -0.012303898110985756, -0.5638378262519836, -0.3421664237976074, 0.11817936599254608, -0.1377345621585846, 0.032508403062820435, 
0.3982551097869873, -0.18941107392311096, -0.039202384650707245, -0.5485571622848511, 0.4593493342399597, 0.37153536081314087, -0.48921605944633484, -0.16065777838230133, 0.7264560461044312, -0.1942795217037201, -0.18538765609264374, 0.32652461528778076, -0.17623496055603027, -0.6131911873817444, 1.1419566869735718, 0.5898898839950562, 0.7081519961357117, -0.2759476602077484, -0.057297151535749435, 0.916064977645874, 0.3946944773197174, -0.04442504048347473, 0.5057705640792847, 0.31310269236564636, -0.2663724422454834, 0.17843562364578247, -0.8697729110717773, -0.07511714100837708, 0.19819608330726624, -0.846502423286438, 0.3120926320552826, -0.5530461072921753, -0.20316727459430695, 0.031217502430081367, 0.4262818992137909, -0.46814706921577454, 0.5713611841201782, -0.4106158912181854, 1.2331777811050415, -0.9902434349060059, 0.6893820762634277, 0.7567469477653503, -0.5628834366798401, -1.0256669521331787, -0.5389311909675598, 0.034375015646219254, -0.8014673590660095, 0.5303619503974915, -0.03453834727406502, 0.15860934555530548, -0.07372912019491196, -0.7402750849723816, -0.8515042662620544, 1.4024325609207153, -0.019716352224349976, -0.45378780364990234, 0.23698338866233826, -0.10562845319509506, 0.4773140847682953, 0.13780052959918976, 0.6210262775421143, 0.7644879817962646, 0.8005895614624023, -0.08855833113193512, -0.7463480234146118, 0.324480801820755, -0.5170335173606873, -0.30005285143852234, 0.43897175788879395, -0.965964674949646, 1.1514086723327637, 0.00523253483697772, 0.21041205525398254, -0.16838492453098297, 0.6527090072631836, 0.8358312845230103, 0.2617986500263214, 0.3292783796787262, 0.9038743376731873, 0.8081079721450806, -0.4786340296268463, 1.0430256128311157, -0.18603983521461487, 0.8788422346115112, 0.7219679355621338, 0.25788015127182007, 0.7946825623512268, 0.667411744594574, -0.5876508951187134, 0.5633524060249329, 0.8001664280891418, -0.2955861985683441, 0.398820161819458, 0.24149729311466217, -0.11278890073299408, -0.14937816560268402, 0.4251959025859833, -0.8748121857643127, 0.10892359167337418, 0.07529672980308533, -0.3238331377506256, 0.10292794555425644, -0.4475683867931366, 0.3057422935962677, -0.10537347942590714, -0.07927556335926056, 0.37919604778289795, 0.03686727583408356, -0.44305941462516785, 0.9456906914710999, -0.1853926032781601, 0.7534198760986328, -0.540639340877533, -0.07337336242198944, -0.43225863575935364, 0.5881290435791016, -0.44693347811698914, -1.0579890012741089, 0.17465464770793915, 0.06797708570957184, -0.11692090332508087, -0.1606602519750595, 0.6843087673187256, -0.18334786593914032, -0.767935037612915, 0.14722181856632233, 0.04316318407654762, 0.11318610608577728, 0.5066235661506653, -0.6853088140487671, -0.2957366406917572, -0.07481830567121506, -0.5603931546211243, 0.14289823174476624, 0.2822593152523041, 0.29797467589378357, 0.5262218117713928, 0.6175747513771057, 0.1603342592716217, 0.41835346817970276, -0.5338628888130188, 0.7889347076416016, -1.0516409873962402, -0.7393453121185303, -0.9377794861793518, 0.4734954237937927, -0.32075822353363037, -0.8963938355445862, 1.0141443014144897, 1.0444319248199463, 0.8737466335296631, 0.008719807490706444, 0.6305133104324341, -0.3697512745857239, 0.24527513980865479, -0.3991818130016327, 0.8959237337112427, -0.8655158281326294, -0.2339712381362915, -0.2915842533111572, -0.728583812713623, -0.39975205063819885, 0.8416964411735535, -0.17938083410263062, 0.01866845227777958, 1.0562373399734497, 0.7046492695808411, -0.10562457144260406, 0.043004587292671204, -0.03689011186361313, 
0.5960016250610352, 0.36522725224494934, 0.9885820150375366, 0.6340214610099792, -0.8091257214546204, 0.3270012438297272, -0.48764824867248535, -0.45515120029449463, -0.42395004630088806, -0.4477180242538452, -0.82246994972229, -0.46841320395469666, -0.21949344873428345, -0.626409649848938, -0.083674855530262, 0.9606665372848511, 0.46936196088790894, -0.9354792833328247, -0.407259464263916, -0.17431126534938812, 0.1291569620370865, -0.5944218039512634, -0.42184555530548096, 0.7122506499290466, -0.0945746973156929, -0.565406858921051, 0.1899774819612503, -0.09519235044717789, 0.21495623886585236, 0.07791691273450851, -0.4009105861186981, -0.7306956052780151, 0.03017451986670494, 0.41837063431739807, 0.37834906578063965, -0.6766366362571716, -0.6980270147323608, 0.30851539969444275, -0.5048379898071289, 0.4117175042629242, -0.00889069028198719, -0.5027812123298645, 0.08423496782779694, 0.695751428604126, 0.4566199481487274, 0.7078390121459961, -0.057430196553468704, 0.08906804770231247, -0.6491172909736633, 0.16523995995521545, -0.013366957195103168, 0.28384488821029663, -0.024454303085803986, -0.3060254752635956, 0.7738294005393982, 0.6903287768363953, -0.5246853232383728, -1.1005823612213135, -0.42117759585380554, -1.3974995613098145, -0.03361337259411812, 1.121537446975708, -0.0019501327769830823, -0.4795965254306793, 0.23772944509983063, -0.12360767275094986, 0.2178821712732315, -0.2900103032588959, 0.7134202122688293, 0.7852592468261719, -0.392289400100708, 0.05182988569140434, -0.6520195007324219, 0.3631002604961395, 0.5386754274368286, -1.1656992435455322, -0.07265301793813705, 0.25377750396728516, 0.3053046464920044, 0.37826505303382874, 0.6779634356498718, -0.1263924539089203, 0.30011576414108276, 0.24984653294086456, 0.046560708433389664, -0.011534922756254673, 0.007622709032148123, -0.2318274825811386, 0.07470192015171051, -0.28747835755348206, -0.4504382014274597 ]
open-llm-leaderboard/details_TheBloke__GPlatty-30B-SuperHOT-8K-fp16
open-llm-leaderboard
2023-08-27T12:34:20Z
201
0
[ "region:us" ]
null
2023-08-18T11:29:53Z
--- pretty_name: Evaluation run of TheBloke/GPlatty-30B-SuperHOT-8K-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/GPlatty-30B-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/GPlatty-30B-SuperHOT-8K-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__GPlatty-30B-SuperHOT-8K-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-01T15:51:23.628970](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__GPlatty-30B-SuperHOT-8K-fp16/blob/main/results_2023-08-01T15%3A51%3A23.628970.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24941704039386783,\n\ \ \"acc_stderr\": 0.0314384194357432,\n \"acc_norm\": 0.2512238671780757,\n\ \ \"acc_norm_stderr\": 0.03145763914734606,\n \"mc1\": 0.22888616891064872,\n\ \ \"mc1_stderr\": 0.014706994909055027,\n \"mc2\": 0.46272712607124966,\n\ \ \"mc2_stderr\": 0.016702158477967525\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.22696245733788395,\n \"acc_stderr\": 0.012240491536132868,\n\ \ \"acc_norm\": 0.2832764505119454,\n \"acc_norm_stderr\": 0.013167478735134576\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.28450507866958774,\n\ \ \"acc_stderr\": 0.004502563079349398,\n \"acc_norm\": 0.33479386576379205,\n\ \ \"acc_norm_stderr\": 0.0047095388649163105\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2518518518518518,\n\ \ \"acc_stderr\": 0.037498507091740206,\n \"acc_norm\": 0.2518518518518518,\n\ \ \"acc_norm_stderr\": 0.037498507091740206\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.19736842105263158,\n \"acc_stderr\": 0.03238981601699397,\n\ \ \"acc_norm\": 0.19736842105263158,\n \"acc_norm_stderr\": 0.03238981601699397\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.31,\n\ \ \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.22641509433962265,\n \"acc_stderr\": 0.02575755989310675,\n\ \ \"acc_norm\": 0.22641509433962265,\n \"acc_norm_stderr\": 0.02575755989310675\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2361111111111111,\n\ \ \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.2361111111111111,\n\ 
\ \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909282\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \ \ \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.04389869956808777,\n\ \ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.04389869956808777\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n\ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.2851063829787234,\n \"acc_stderr\": 0.029513196625539355,\n\ \ \"acc_norm\": 0.2851063829787234,\n \"acc_norm_stderr\": 0.029513196625539355\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21052631578947367,\n\ \ \"acc_stderr\": 0.03835153954399421,\n \"acc_norm\": 0.21052631578947367,\n\ \ \"acc_norm_stderr\": 0.03835153954399421\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.037245636197746325,\n\ \ \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.037245636197746325\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.21428571428571427,\n \"acc_stderr\": 0.02113285918275444,\n \"\ acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.02113285918275444\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.2645161290322581,\n \"acc_stderr\": 0.025091892378859275,\n \"\ acc_norm\": 0.2645161290322581,\n \"acc_norm_stderr\": 0.025091892378859275\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.22167487684729065,\n \"acc_stderr\": 0.029225575892489624,\n \"\ acc_norm\": 0.22167487684729065,\n \"acc_norm_stderr\": 0.029225575892489624\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \"acc_norm\"\ : 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.21717171717171718,\n \"acc_stderr\": 0.029376616484945633,\n \"\ acc_norm\": 0.21717171717171718,\n \"acc_norm_stderr\": 0.029376616484945633\n\ \ 
},\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.18134715025906736,\n \"acc_stderr\": 0.02780703236068609,\n\ \ \"acc_norm\": 0.18134715025906736,\n \"acc_norm_stderr\": 0.02780703236068609\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.2717948717948718,\n \"acc_stderr\": 0.022556551010132354,\n\ \ \"acc_norm\": 0.2717948717948718,\n \"acc_norm_stderr\": 0.022556551010132354\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2037037037037037,\n \"acc_stderr\": 0.024556172219141265,\n \ \ \"acc_norm\": 0.2037037037037037,\n \"acc_norm_stderr\": 0.024556172219141265\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.22268907563025211,\n \"acc_stderr\": 0.027025433498882385,\n\ \ \"acc_norm\": 0.22268907563025211,\n \"acc_norm_stderr\": 0.027025433498882385\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.19205298013245034,\n \"acc_stderr\": 0.032162984205936135,\n \"\ acc_norm\": 0.19205298013245034,\n \"acc_norm_stderr\": 0.032162984205936135\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.22568807339449543,\n \"acc_stderr\": 0.017923087667803053,\n \"\ acc_norm\": 0.22568807339449543,\n \"acc_norm_stderr\": 0.017923087667803053\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.26851851851851855,\n \"acc_stderr\": 0.030225226160012397,\n \"\ acc_norm\": 0.26851851851851855,\n \"acc_norm_stderr\": 0.030225226160012397\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.24509803921568626,\n \"acc_stderr\": 0.03019028245350195,\n \"\ acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.03019028245350195\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.2869198312236287,\n \"acc_stderr\": 0.029443773022594693,\n \ \ \"acc_norm\": 0.2869198312236287,\n \"acc_norm_stderr\": 0.029443773022594693\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.2914798206278027,\n\ \ \"acc_stderr\": 0.030500283176545902,\n \"acc_norm\": 0.2914798206278027,\n\ \ \"acc_norm_stderr\": 0.030500283176545902\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2748091603053435,\n \"acc_stderr\": 0.03915345408847835,\n\ \ \"acc_norm\": 0.2748091603053435,\n \"acc_norm_stderr\": 0.03915345408847835\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2727272727272727,\n \"acc_stderr\": 0.04065578140908705,\n \"\ acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.04065578140908705\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.26851851851851855,\n\ \ \"acc_stderr\": 0.04284467968052191,\n \"acc_norm\": 0.26851851851851855,\n\ \ \"acc_norm_stderr\": 0.04284467968052191\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.26993865030674846,\n \"acc_stderr\": 0.03487825168497892,\n\ \ \"acc_norm\": 0.26993865030674846,\n \"acc_norm_stderr\": 0.03487825168497892\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\ \ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\ \ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2863247863247863,\n\ \ 
\"acc_stderr\": 0.029614323690456648,\n \"acc_norm\": 0.2863247863247863,\n\ \ \"acc_norm_stderr\": 0.029614323690456648\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.3065134099616858,\n\ \ \"acc_stderr\": 0.01648695289304151,\n \"acc_norm\": 0.3065134099616858,\n\ \ \"acc_norm_stderr\": 0.01648695289304151\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.022698657167855716,\n\ \ \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.022698657167855716\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24581005586592178,\n\ \ \"acc_stderr\": 0.014400296429225629,\n \"acc_norm\": 0.24581005586592178,\n\ \ \"acc_norm_stderr\": 0.014400296429225629\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.3104575163398693,\n \"acc_stderr\": 0.026493033225145894,\n\ \ \"acc_norm\": 0.3104575163398693,\n \"acc_norm_stderr\": 0.026493033225145894\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.27009646302250806,\n\ \ \"acc_stderr\": 0.025218040373410612,\n \"acc_norm\": 0.27009646302250806,\n\ \ \"acc_norm_stderr\": 0.025218040373410612\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.2345679012345679,\n \"acc_stderr\": 0.023576881744005716,\n\ \ \"acc_norm\": 0.2345679012345679,\n \"acc_norm_stderr\": 0.023576881744005716\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.25177304964539005,\n \"acc_stderr\": 0.025892151156709405,\n \ \ \"acc_norm\": 0.25177304964539005,\n \"acc_norm_stderr\": 0.025892151156709405\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.26597131681877445,\n\ \ \"acc_stderr\": 0.011285033165551274,\n \"acc_norm\": 0.26597131681877445,\n\ \ \"acc_norm_stderr\": 0.011285033165551274\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.02315746830855934,\n\ \ \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.02315746830855934\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.27450980392156865,\n \"acc_stderr\": 0.018054027458815198,\n \ \ \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.018054027458815198\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2,\n\ \ \"acc_stderr\": 0.03831305140884601,\n \"acc_norm\": 0.2,\n \ \ \"acc_norm_stderr\": 0.03831305140884601\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.20816326530612245,\n \"acc_stderr\": 0.025991117672813292,\n\ \ \"acc_norm\": 0.20816326530612245,\n \"acc_norm_stderr\": 0.025991117672813292\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.26865671641791045,\n\ \ \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.26865671641791045,\n\ \ \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2710843373493976,\n\ \ \"acc_stderr\": 0.03460579907553027,\n \"acc_norm\": 0.2710843373493976,\n\ \ \"acc_norm_stderr\": 0.03460579907553027\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.29239766081871343,\n \"acc_stderr\": 
0.034886477134579215,\n\ \ \"acc_norm\": 0.29239766081871343,\n \"acc_norm_stderr\": 0.034886477134579215\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22888616891064872,\n\ \ \"mc1_stderr\": 0.014706994909055027,\n \"mc2\": 0.46272712607124966,\n\ \ \"mc2_stderr\": 0.016702158477967525\n }\n}\n```" repo_url: https://huggingface.co/TheBloke/GPlatty-30B-SuperHOT-8K-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|arc:challenge|25_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hellaswag|10_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:51:23.628970.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:51:23.628970.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:51:23.628970.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:51:23.628970.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:51:23.628970.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:51:23.628970.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_01T15_51_23.628970 path: - '**/details_harness|truthfulqa:mc|0_2023-08-01T15:51:23.628970.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-01T15:51:23.628970.parquet' - config_name: results data_files: - split: 2023_08_01T15_51_23.628970 path: - results_2023-08-01T15:51:23.628970.parquet - split: latest path: - results_2023-08-01T15:51:23.628970.parquet --- # Dataset Card for Evaluation run of TheBloke/GPlatty-30B-SuperHOT-8K-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/GPlatty-30B-SuperHOT-8K-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/GPlatty-30B-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/GPlatty-30B-SuperHOT-8K-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
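The configuration and split layout described above can be checked directly with the `datasets` library. The snippet below is a minimal sketch, assuming only standard `datasets` functions such as `get_dataset_config_names`; the repository id, the `results` configuration, and the `latest` split are the ones named in this card:

```python
from datasets import get_dataset_config_names, load_dataset

repo_id = "open-llm-leaderboard/details_TheBloke__GPlatty-30B-SuperHOT-8K-fp16"

# List the configurations (one per evaluated task, plus the aggregated "results").
config_names = get_dataset_config_names(repo_id)
print(len(config_names), config_names[:5])

# The "latest" split of the "results" configuration holds the aggregated
# metrics of the most recent run (2023-08-01T15:51:23.628970 for this card).
results = load_dataset(repo_id, "results", split="latest")
print(results[0])
```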
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__GPlatty-30B-SuperHOT-8K-fp16", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-01T15:51:23.628970](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__GPlatty-30B-SuperHOT-8K-fp16/blob/main/results_2023-08-01T15%3A51%3A23.628970.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24941704039386783, "acc_stderr": 0.0314384194357432, "acc_norm": 0.2512238671780757, "acc_norm_stderr": 0.03145763914734606, "mc1": 0.22888616891064872, "mc1_stderr": 0.014706994909055027, "mc2": 0.46272712607124966, "mc2_stderr": 0.016702158477967525 }, "harness|arc:challenge|25": { "acc": 0.22696245733788395, "acc_stderr": 0.012240491536132868, "acc_norm": 0.2832764505119454, "acc_norm_stderr": 0.013167478735134576 }, "harness|hellaswag|10": { "acc": 0.28450507866958774, "acc_stderr": 0.004502563079349398, "acc_norm": 0.33479386576379205, "acc_norm_stderr": 0.0047095388649163105 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2518518518518518, "acc_stderr": 0.037498507091740206, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.037498507091740206 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.22641509433962265, "acc_stderr": 0.02575755989310675, "acc_norm": 0.22641509433962265, "acc_norm_stderr": 0.02575755989310675 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2361111111111111, "acc_stderr": 0.03551446610810826, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.19, "acc_stderr": 0.039427724440366234, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.04389869956808777, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808777 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2851063829787234, "acc_stderr": 0.029513196625539355, "acc_norm": 0.2851063829787234, "acc_norm_stderr": 0.029513196625539355 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.21052631578947367, "acc_stderr": 0.03835153954399421, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.03835153954399421 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.27586206896551724, "acc_stderr": 0.037245636197746325, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.037245636197746325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.21428571428571427, "acc_stderr": 0.02113285918275444, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.02113285918275444 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2645161290322581, "acc_stderr": 0.025091892378859275, "acc_norm": 0.2645161290322581, "acc_norm_stderr": 0.025091892378859275 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.22167487684729065, "acc_stderr": 0.029225575892489624, "acc_norm": 0.22167487684729065, "acc_norm_stderr": 0.029225575892489624 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.21717171717171718, "acc_stderr": 0.029376616484945633, "acc_norm": 0.21717171717171718, "acc_norm_stderr": 0.029376616484945633 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.18134715025906736, "acc_stderr": 0.02780703236068609, "acc_norm": 0.18134715025906736, "acc_norm_stderr": 0.02780703236068609 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2717948717948718, "acc_stderr": 0.022556551010132354, "acc_norm": 0.2717948717948718, "acc_norm_stderr": 0.022556551010132354 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2037037037037037, "acc_stderr": 0.024556172219141265, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.024556172219141265 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.027025433498882385, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.027025433498882385 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.19205298013245034, "acc_stderr": 0.032162984205936135, "acc_norm": 0.19205298013245034, "acc_norm_stderr": 0.032162984205936135 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22568807339449543, "acc_stderr": 0.017923087667803053, "acc_norm": 0.22568807339449543, "acc_norm_stderr": 0.017923087667803053 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.26851851851851855, "acc_stderr": 0.030225226160012397, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.030225226160012397 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.24509803921568626, "acc_stderr": 0.03019028245350195, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.03019028245350195 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2869198312236287, "acc_stderr": 0.029443773022594693, "acc_norm": 0.2869198312236287, "acc_norm_stderr": 0.029443773022594693 }, "harness|hendrycksTest-human_aging|5": { 
"acc": 0.2914798206278027, "acc_stderr": 0.030500283176545902, "acc_norm": 0.2914798206278027, "acc_norm_stderr": 0.030500283176545902 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.03915345408847835, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.03915345408847835 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2727272727272727, "acc_stderr": 0.04065578140908705, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.04065578140908705 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.26851851851851855, "acc_stderr": 0.04284467968052191, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.04284467968052191 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26993865030674846, "acc_stderr": 0.03487825168497892, "acc_norm": 0.26993865030674846, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2863247863247863, "acc_stderr": 0.029614323690456648, "acc_norm": 0.2863247863247863, "acc_norm_stderr": 0.029614323690456648 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.3065134099616858, "acc_stderr": 0.01648695289304151, "acc_norm": 0.3065134099616858, "acc_norm_stderr": 0.01648695289304151 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23121387283236994, "acc_stderr": 0.022698657167855716, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.022698657167855716 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24581005586592178, "acc_stderr": 0.014400296429225629, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.014400296429225629 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.3104575163398693, "acc_stderr": 0.026493033225145894, "acc_norm": 0.3104575163398693, "acc_norm_stderr": 0.026493033225145894 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.27009646302250806, "acc_stderr": 0.025218040373410612, "acc_norm": 0.27009646302250806, "acc_norm_stderr": 0.025218040373410612 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2345679012345679, "acc_stderr": 0.023576881744005716, "acc_norm": 0.2345679012345679, "acc_norm_stderr": 0.023576881744005716 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.25177304964539005, "acc_stderr": 0.025892151156709405, "acc_norm": 0.25177304964539005, "acc_norm_stderr": 0.025892151156709405 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.26597131681877445, "acc_stderr": 0.011285033165551274, "acc_norm": 0.26597131681877445, "acc_norm_stderr": 0.011285033165551274 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.17647058823529413, "acc_stderr": 0.02315746830855934, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.02315746830855934 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.27450980392156865, "acc_stderr": 0.018054027458815198, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.018054027458815198 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2, "acc_stderr": 0.03831305140884601, "acc_norm": 0.2, "acc_norm_stderr": 0.03831305140884601 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.20816326530612245, "acc_stderr": 0.025991117672813292, "acc_norm": 0.20816326530612245, "acc_norm_stderr": 0.025991117672813292 }, "harness|hendrycksTest-sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.03460579907553027, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553027 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.29239766081871343, "acc_stderr": 0.034886477134579215, "acc_norm": 0.29239766081871343, "acc_norm_stderr": 0.034886477134579215 }, "harness|truthfulqa:mc|0": { "mc1": 0.22888616891064872, "mc1_stderr": 0.014706994909055027, "mc2": 0.46272712607124966, "mc2_stderr": 0.016702158477967525 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7105635404586792, -0.8342598676681519, 0.2714257538318634, 0.16547422111034393, -0.15396156907081604, -0.02801419235765934, 0.039300911128520966, -0.23972590267658234, 0.5806369781494141, -0.10407108068466187, -0.507987380027771, -0.6556375622749329, -0.42995142936706543, 0.21883578598499298, -0.03802831470966339, 0.8535020351409912, -0.1931813806295395, -0.11318179219961166, 0.10289108008146286, -0.007694886531680822, -0.2396068572998047, -0.35314375162124634, -0.4786575138568878, -0.3448461592197418, 0.1603134423494339, 0.40750759840011597, 0.4660659432411194, 0.7591472864151001, 0.7263042330741882, 0.3054821789264679, -0.3220290243625641, -0.025722039863467216, -0.18338656425476074, -0.28868961334228516, 0.3635265529155731, -0.3459365963935852, -0.8102871179580688, 0.2713998556137085, 0.7685701251029968, 0.5740978717803955, -0.07468254119157791, 0.2888525426387787, 0.00938205886632204, 0.5530775785446167, -0.36863917112350464, 0.04368848353624344, -0.29184702038764954, 0.23868344724178314, -0.2038068026304245, -0.2553691267967224, -0.2751808166503906, -0.18113616108894348, -0.15854932367801666, -0.8829227685928345, 0.31366440653800964, 0.3152238428592682, 1.6132057905197144, -0.18244744837284088, -0.1653451770544052, 0.11801532655954361, -0.11606710404157639, 1.0158934593200684, -0.8690155148506165, 0.3835327923297882, 0.7449492812156677, 0.1428360939025879, -0.1569048911333084, -0.582149088382721, -0.6430570483207703, 0.043974705040454865, -0.36636385321617126, 0.3800775408744812, -0.05147232860326767, -0.17668023705482483, 0.37222397327423096, 0.7033153176307678, -0.6833867430686951, 0.15109434723854065, -0.6481152772903442, -0.212448850274086, 1.0768077373504639, 0.3585148751735687, 0.10573265701532364, -0.3496231138706207, -0.6860120296478271, -0.6216012835502625, -0.4355771839618683, 0.2537447512149811, 0.42934852838516235, 0.3319050073623657, -0.38391369581222534, 0.6942590475082397, -0.3960021436214447, 0.5620516538619995, 0.43640491366386414, 0.035530637949705124, 0.8504091501235962, -0.7417545914649963, -0.5383643507957458, -0.09066810458898544, 1.1013848781585693, 0.5989096760749817, 0.07546710968017578, 0.2040342539548874, 0.032790835946798325, -0.09446244686841965, 0.012052170932292938, -0.8862336874008179, -0.3331892788410187, 0.18571218848228455, -0.3417488932609558, -0.44514766335487366, 0.3370620310306549, -0.8525561094284058, 0.1760832518339157, -0.03157990425825119, 0.3718791902065277, -0.5186164975166321, -0.13208888471126556, 0.20723339915275574, -0.4286872446537018, 0.826741099357605, -0.14750990271568298, -0.8289863467216492, 0.36589109897613525, 0.5130659937858582, 0.7696024775505066, -0.060365308076143265, -0.45861107110977173, -0.10038385540246964, -0.08633258938789368, -0.3058446943759918, 0.540751576423645, -0.2947302460670471, -0.41667816042900085, -0.30034202337265015, 0.30970829725265503, -0.25142911076545715, -0.36296266317367554, 0.6818059086799622, -0.23647814989089966, 0.1807207465171814, -0.4329500198364258, -0.6695351600646973, 0.18318074941635132, 0.3879934251308441, -0.39680150151252747, 1.27085542678833, 0.25830885767936707, -0.8128958344459534, 0.45405295491218567, -0.582819938659668, -0.12803763151168823, 0.04497263580560684, -0.05330192297697067, -0.794188380241394, -0.23736350238323212, 0.18698517978191376, 0.4199443459510803, -0.09565918892621994, -0.11164600402116776, -0.40075644850730896, -0.3668825924396515, 0.3273192346096039, -0.1396649181842804, 1.206567406654358, -0.033889733254909515, -0.7506041526794434, -0.10057465732097626, 
-1.1994893550872803, 0.29105332493782043, 0.2096318006515503, -0.3425664007663727, -0.1548955738544464, -0.5288469791412354, -0.012100269086658955, 0.16850246489048004, 0.2666170597076416, -0.8295225501060486, 0.2891519367694855, -0.35570386052131653, 0.17518915235996246, 1.2523484230041504, 0.03634118288755417, 0.16882938146591187, -0.5161910653114319, 0.5329194068908691, 0.19362784922122955, 0.2159610539674759, 0.3723249137401581, -0.5950042009353638, -0.7588803172111511, -0.505746066570282, -0.03171219304203987, 0.6122400164604187, -0.2306545078754425, 1.104842185974121, 0.10583388060331345, -0.8851807117462158, -0.4742389917373657, -0.10946948081254959, 0.5223571062088013, 0.7571051120758057, 0.6340746879577637, -0.03870045766234398, -0.6062045693397522, -1.0848263502120972, -0.3103732466697693, -0.15311481058597565, 0.1292564421892166, 0.22077567875385284, 1.0391135215759277, -0.24398712813854218, 0.5716288685798645, -0.9921125173568726, -0.2079162448644638, 0.11863112449645996, -0.0734449028968811, 0.7972316741943359, 0.7414455413818359, 0.58585524559021, -0.6843993663787842, -0.5531250238418579, 0.20604942739009857, -0.8720922470092773, -0.09817279130220413, 0.1045343428850174, -0.2941049337387085, 0.1561477780342102, 0.14607247710227966, -0.7308072447776794, 0.53226637840271, 0.2148776799440384, -1.0820649862289429, 1.1033941507339478, -0.3379581868648529, 0.577851414680481, -0.96372389793396, 0.18427318334579468, -0.041202373802661896, 0.02851800061762333, -0.5142020583152771, 0.07940102368593216, 0.09539038687944412, 0.43099072575569153, -0.533182680606842, 0.8293262124061584, -0.7197339534759521, -0.06866514682769775, 0.4627877175807953, 0.11782870441675186, -0.11093169450759888, 0.3311060070991516, -0.20123784244060516, 0.7967327237129211, 0.7621122002601624, -0.4270254075527191, 0.5430122017860413, 0.3982786536216736, -0.2107245922088623, 0.7355342507362366, -0.4823909401893616, -0.3050997853279114, 0.30717208981513977, -0.029253361746668816, -0.8283029794692993, -0.4887634515762329, 0.042815253138542175, -0.6074463129043579, -0.0633392259478569, 0.37152546644210815, -0.27286651730537415, -0.8224001526832581, -0.9466525316238403, 0.325658917427063, 0.6527407765388489, -0.39094120264053345, -0.2017427682876587, 0.05864645540714264, 0.0707424134016037, -0.8136054277420044, -0.8501794934272766, -0.46921342611312866, -0.1829565465450287, -0.6773152351379395, 0.3307081162929535, -0.2856540381908417, -0.2640242874622345, -0.08821330964565277, -0.230598583817482, -0.3069807291030884, -0.038888119161129, 0.12188699096441269, 0.717132568359375, -0.3743453621864319, -0.3299260139465332, -0.28248754143714905, -0.1953102946281433, 0.19268687069416046, -0.08136558532714844, 0.3771032691001892, -0.47328025102615356, -0.37135380506515503, -0.4509499967098236, -0.008105890825390816, 0.7080455422401428, -0.05404629930853844, 0.7457625269889832, 0.4467583894729614, -0.28602227568626404, -0.004251295235008001, -0.28669193387031555, -0.28027817606925964, -0.5781741738319397, 0.27802324295043945, -0.5185231566429138, -1.0699048042297363, 0.819279670715332, 0.5765714645385742, 0.05058257654309273, 1.132362961769104, 0.6217206716537476, -0.3184308111667633, 1.006656527519226, 0.0199715755879879, 0.3295597434043884, 0.42989519238471985, -0.7246737480163574, 0.1293756365776062, -0.9362667798995972, -0.30347806215286255, -0.5469403862953186, -0.495892196893692, -0.6958341002464294, -0.07421422004699707, 0.26023948192596436, 0.17996257543563843, -0.6919927597045898, 0.6061211228370667, 
-0.8370920419692993, 0.5983794331550598, 0.5766416788101196, 0.26707881689071655, 0.14692805707454681, -0.16970965266227722, -0.39888620376586914, -0.14856363832950592, -0.4720926880836487, -0.20289698243141174, 1.219589352607727, 0.25851601362228394, 0.6980706453323364, 0.0603717640042305, 0.8903432488441467, 0.06977537274360657, -0.04853720963001251, -0.5584849119186401, 0.671631395816803, 0.09606143832206726, -0.8535492420196533, -0.43657445907592773, -0.5200783610343933, -1.0471270084381104, 0.38775765895843506, -0.1324080377817154, -0.8554409146308899, 0.11955945193767548, 0.06616441160440445, -0.21070410311222076, 0.49895334243774414, -0.5563214421272278, 0.8747715950012207, -0.12460818141698837, -0.5256608128547668, 0.11696922779083252, -0.8485082983970642, 0.4436570405960083, 0.22210544347763062, 0.24800066649913788, 0.05170976743102074, 0.2744460999965668, 1.1426719427108765, -0.8400359749794006, 0.3903835415840149, 0.03595197573304176, 0.06569615006446838, 0.34779590368270874, -0.15923751890659332, 0.516965389251709, 0.0648636668920517, 0.009167281910777092, -0.08014336973428726, 0.2946547269821167, -0.8654996156692505, -0.06227296590805054, 0.9173001050949097, -0.9380239844322205, -0.5721062421798706, -0.8579533696174622, -0.5477664470672607, 0.09475834667682648, 0.5709105134010315, 0.4080435037612915, 0.5476624965667725, 0.030995117500424385, 0.453812837600708, 0.8132280111312866, -0.1269311010837555, 0.621703028678894, 0.24062247574329376, 0.062192972749471664, -0.6528672575950623, 0.8681040406227112, 0.12799859046936035, 0.3791430592536926, 0.29218539595603943, 0.4015354812145233, -0.5544481873512268, -0.244010329246521, -0.23707304894924164, 0.4863605201244354, -0.6073296666145325, -0.27263757586479187, -0.32909274101257324, -0.37735435366630554, -0.7820318341255188, -0.6619430184364319, -0.32168716192245483, -0.4879876971244812, -0.4605260193347931, -0.5158646702766418, 0.5703487992286682, 0.4317485988140106, -0.39633655548095703, 0.0396873913705349, -0.5282528400421143, 0.27010640501976013, 0.3394578993320465, 0.5214136838912964, -0.4149065911769867, -0.5915520787239075, 0.03977283090353012, -0.1482722908258438, -0.5707714557647705, -0.9394592642784119, 0.3585970997810364, -0.024302706122398376, 0.5228551030158997, 0.5627171993255615, 0.03872906044125557, 0.8546143174171448, -0.23355111479759216, 1.0786672830581665, 0.3644488751888275, -0.7879832983016968, 0.7369062900543213, -0.34378427267074585, 0.21086788177490234, 0.6664248108863831, 0.19122157990932465, -0.15163980424404144, -0.7289189696311951, -1.3360369205474854, -0.8144869208335876, 0.6340922117233276, 0.3813181221485138, -0.26036885380744934, 0.05657971277832985, 0.13906621932983398, -0.3288222849369049, -0.17313508689403534, -0.6950890421867371, -0.8515377044677734, -0.17530028522014618, -0.4931159019470215, 0.11050659418106079, 0.07177872210741043, -0.38986319303512573, -0.8479716777801514, 0.927332878112793, 0.009864180348813534, 0.6026297211647034, 0.49860286712646484, 0.0429772324860096, 0.06752382963895798, 0.4791526794433594, 0.9210869073867798, 0.762691855430603, -0.4842572510242462, 0.41347020864486694, 0.3932412564754486, -1.0510332584381104, 0.4495985209941864, 0.32022902369499207, -0.08872290700674057, -0.022446289658546448, 0.48177456855773926, 0.42930588126182556, 0.0724153071641922, -0.2043035924434662, 0.6192740201950073, 0.02172277122735977, -0.5635232329368591, -0.3283451497554779, 0.09498436748981476, -0.1177898421883583, 0.022327108308672905, 0.400647908449173, -0.20495237410068512, 
-0.029164772480726242, -0.5271807312965393, 0.4829612374305725, 0.4081451892852783, -0.4713081419467926, -0.18049633502960205, 0.7321898341178894, -0.19533388316631317, -0.17024877667427063, 0.32049721479415894, -0.1983129233121872, -0.6288256049156189, 1.1237143278121948, 0.5924553871154785, 0.7009791135787964, -0.2777441442012787, -0.04846716672182083, 0.8895734548568726, 0.382832795381546, -0.02494034543633461, 0.49953651428222656, 0.30092236399650574, -0.2710995674133301, 0.14132113754749298, -0.8917614817619324, -0.08836143463850021, 0.1638992428779602, -0.7945420145988464, 0.279766708612442, -0.525450587272644, -0.18245020508766174, 0.04334859922528267, 0.41477781534194946, -0.4640653431415558, 0.5573937892913818, -0.3978719711303711, 1.2163363695144653, -0.9854307174682617, 0.7001920342445374, 0.7639245986938477, -0.5616899132728577, -1.0427281856536865, -0.5487210750579834, 0.012883737683296204, -0.8166239857673645, 0.561167299747467, -0.04245826601982117, 0.19540533423423767, -0.08582223951816559, -0.7478416562080383, -0.8637723922729492, 1.3836376667022705, -0.02647566795349121, -0.48935920000076294, 0.23911622166633606, -0.0737985298037529, 0.47480738162994385, 0.14306101202964783, 0.6195050477981567, 0.7450714707374573, 0.8175256848335266, -0.06487917900085449, -0.7568626403808594, 0.3234404921531677, -0.508707582950592, -0.3192533254623413, 0.4756051003932953, -0.9327805638313293, 1.1536383628845215, -0.004066778346896172, 0.21570372581481934, -0.20277319848537445, 0.6219497919082642, 0.7978719472885132, 0.27954572439193726, 0.3096511662006378, 0.9365634322166443, 0.7921793460845947, -0.5069196820259094, 1.0347589254379272, -0.22940875589847565, 0.913438618183136, 0.6886659264564514, 0.21473990380764008, 0.7808285355567932, 0.6767746806144714, -0.618317186832428, 0.5510870814323425, 0.7904260754585266, -0.3229433298110962, 0.40897372364997864, 0.27627402544021606, -0.12368688732385635, -0.1431313455104828, 0.4384446144104004, -0.8808422088623047, 0.07978525012731552, 0.06397821009159088, -0.3299019932746887, 0.06732076406478882, -0.4481387436389923, 0.29621651768684387, -0.122987762093544, -0.08727368712425232, 0.38133037090301514, 0.03527640551328659, -0.4123963713645935, 0.9238718152046204, -0.2111034393310547, 0.7433245778083801, -0.5374102592468262, -0.0711866095662117, -0.40510717034339905, 0.6048942804336548, -0.4468541741371155, -1.0743902921676636, 0.21288368105888367, 0.04521533474326134, -0.12798209488391876, -0.19037538766860962, 0.7365267276763916, -0.200886070728302, -0.7926820516586304, 0.1396513283252716, 0.03351559117436409, 0.10804193466901779, 0.4970894157886505, -0.7072643637657166, -0.30959951877593994, -0.07083959877490997, -0.5680291652679443, 0.15488331019878387, 0.27923348546028137, 0.3204495906829834, 0.514565110206604, 0.6468957662582397, 0.18875829875469208, 0.4154369533061981, -0.554645836353302, 0.8009204864501953, -1.0468710660934448, -0.7214257121086121, -0.9345532655715942, 0.4926008880138397, -0.33443379402160645, -0.8700640201568604, 1.000657320022583, 1.0286725759506226, 0.8910307288169861, -0.0139335747808218, 0.6189837455749512, -0.3787372410297394, 0.260608047246933, -0.3757763206958771, 0.915787935256958, -0.8321599364280701, -0.2408570796251297, -0.2812149226665497, -0.7224907875061035, -0.40527474880218506, 0.8363028764724731, -0.18482699990272522, 0.024881765246391296, 1.0441012382507324, 0.6891168355941772, -0.10437913239002228, 0.045005761086940765, -0.038281116634607315, 0.5939109921455383, 0.38748809695243835, 1.002401351928711, 
0.6364007592201233, -0.7768951654434204, 0.3335720896720886, -0.47820931673049927, -0.44652053713798523, -0.40088751912117004, -0.47017720341682434, -0.8241820335388184, -0.42704352736473083, -0.22554750740528107, -0.6023320555686951, -0.08452921360731125, 0.9397248029708862, 0.47118017077445984, -0.9691174626350403, -0.38411641120910645, -0.17651870846748352, 0.1273549199104309, -0.5844494104385376, -0.42566514015197754, 0.7093851566314697, -0.12483982741832733, -0.541948139667511, 0.19368821382522583, -0.08149387687444687, 0.20430441200733185, 0.08632311969995499, -0.40127161145210266, -0.7315531373023987, 0.013788607902824879, 0.4307641088962555, 0.37106993794441223, -0.6741194725036621, -0.7245364785194397, 0.29781636595726013, -0.48845383524894714, 0.41628846526145935, -0.015063557773828506, -0.49613863229751587, 0.06987302750349045, 0.7018746733665466, 0.48744386434555054, 0.6988048553466797, -0.05570296570658684, 0.07840752601623535, -0.6627734303474426, 0.15740728378295898, -0.012021658010780811, 0.2840363085269928, -0.02326703816652298, -0.31097787618637085, 0.7765457630157471, 0.7137067914009094, -0.5626033544540405, -1.0981645584106445, -0.4248896539211273, -1.4110826253890991, -0.02080667018890381, 1.1062101125717163, -0.026870425790548325, -0.4529307782649994, 0.2317487895488739, -0.11693714559078217, 0.17325939238071442, -0.3183410167694092, 0.7181503176689148, 0.8115428686141968, -0.4002044200897217, 0.060856305062770844, -0.6538642644882202, 0.3749610185623169, 0.5274666547775269, -1.1700316667556763, -0.08077725023031235, 0.25740846991539, 0.30820760130882263, 0.39712265133857727, 0.6525698304176331, -0.09919781982898712, 0.268098920583725, 0.237626850605011, 0.033746138215065, -0.004138851538300514, 0.0229275394231081, -0.2191830724477768, 0.1137222871184349, -0.2993389070034027, -0.44327202439308167 ]
open-llm-leaderboard/details_SLAM-group__NewHope
open-llm-leaderboard
2023-08-27T12:34:23Z
201
0
[ "region:us" ]
null
2023-08-18T11:30:11Z
--- pretty_name: Evaluation run of SLAM-group/NewHope dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [SLAM-group/NewHope](https://huggingface.co/SLAM-group/NewHope) on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_SLAM-group__NewHope\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-02T16:20:26.294433](https://huggingface.co/datasets/open-llm-leaderboard/details_SLAM-group__NewHope/blob/main/results_2023-08-02T16%3A20%3A26.294433.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5588691829632426,\n\ \ \"acc_stderr\": 0.03433115773924322,\n \"acc_norm\": 0.5628652703397449,\n\ \ \"acc_norm_stderr\": 0.03430877590228174,\n \"mc1\": 0.3243574051407589,\n\ \ \"mc1_stderr\": 0.016387976779647935,\n \"mc2\": 0.44868368066946906,\n\ \ \"mc2_stderr\": 0.015140951474620613\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5767918088737202,\n \"acc_stderr\": 0.014438036220848022,\n\ \ \"acc_norm\": 0.6092150170648464,\n \"acc_norm_stderr\": 0.014258563880513782\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6366261700856403,\n\ \ \"acc_stderr\": 0.004799882248494812,\n \"acc_norm\": 0.8399721171081458,\n\ \ \"acc_norm_stderr\": 0.003658826208101608\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n\ \ \"acc_stderr\": 0.04284958639753399,\n \"acc_norm\": 0.43703703703703706,\n\ \ \"acc_norm_stderr\": 0.04284958639753399\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5328947368421053,\n \"acc_stderr\": 0.040601270352363966,\n\ \ \"acc_norm\": 0.5328947368421053,\n \"acc_norm_stderr\": 0.040601270352363966\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5811320754716981,\n \"acc_stderr\": 0.03036505082911521,\n\ \ \"acc_norm\": 0.5811320754716981,\n \"acc_norm_stderr\": 0.03036505082911521\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5763888888888888,\n\ \ \"acc_stderr\": 0.04132125019723369,\n \"acc_norm\": 0.5763888888888888,\n\ \ \"acc_norm_stderr\": 0.04132125019723369\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n 
\"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \ \ \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n\ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952344,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952344\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5260115606936416,\n\ \ \"acc_stderr\": 0.038073017265045125,\n \"acc_norm\": 0.5260115606936416,\n\ \ \"acc_norm_stderr\": 0.038073017265045125\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.0433643270799318,\n\ \ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.0433643270799318\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n\ \ \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4595744680851064,\n \"acc_stderr\": 0.03257901482099835,\n\ \ \"acc_norm\": 0.4595744680851064,\n \"acc_norm_stderr\": 0.03257901482099835\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.04142439719489361,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.04142439719489361\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4689655172413793,\n \"acc_stderr\": 0.04158632762097828,\n\ \ \"acc_norm\": 0.4689655172413793,\n \"acc_norm_stderr\": 0.04158632762097828\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3148148148148148,\n \"acc_stderr\": 0.023919984164047732,\n \"\ acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.023919984164047732\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n\ \ \"acc_stderr\": 0.042857142857142816,\n \"acc_norm\": 0.35714285714285715,\n\ \ \"acc_norm_stderr\": 0.042857142857142816\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6870967741935484,\n\ \ \"acc_stderr\": 0.02637756702864586,\n \"acc_norm\": 0.6870967741935484,\n\ \ \"acc_norm_stderr\": 0.02637756702864586\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.39901477832512317,\n \"acc_stderr\": 0.03445487686264715,\n\ \ \"acc_norm\": 0.39901477832512317,\n \"acc_norm_stderr\": 0.03445487686264715\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\"\ : 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6606060606060606,\n \"acc_stderr\": 0.03697442205031595,\n\ \ \"acc_norm\": 0.6606060606060606,\n \"acc_norm_stderr\": 0.03697442205031595\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7070707070707071,\n \"acc_stderr\": 0.032424979581788166,\n \"\ acc_norm\": 0.7070707070707071,\n \"acc_norm_stderr\": 0.032424979581788166\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8341968911917098,\n 
\"acc_stderr\": 0.026839845022314415,\n\ \ \"acc_norm\": 0.8341968911917098,\n \"acc_norm_stderr\": 0.026839845022314415\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5256410256410257,\n \"acc_stderr\": 0.025317649726448656,\n\ \ \"acc_norm\": 0.5256410256410257,\n \"acc_norm_stderr\": 0.025317649726448656\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.32592592592592595,\n \"acc_stderr\": 0.02857834836547307,\n \ \ \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.02857834836547307\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6134453781512605,\n \"acc_stderr\": 0.03163145807552379,\n \ \ \"acc_norm\": 0.6134453781512605,\n \"acc_norm_stderr\": 0.03163145807552379\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658753,\n \"\ acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658753\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7211009174311926,\n \"acc_stderr\": 0.01922746887646351,\n \"\ acc_norm\": 0.7211009174311926,\n \"acc_norm_stderr\": 0.01922746887646351\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4583333333333333,\n \"acc_stderr\": 0.03398110890294636,\n \"\ acc_norm\": 0.4583333333333333,\n \"acc_norm_stderr\": 0.03398110890294636\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7745098039215687,\n \"acc_stderr\": 0.029331162294251735,\n \"\ acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.029331162294251735\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7426160337552743,\n \"acc_stderr\": 0.028458820991460295,\n \ \ \"acc_norm\": 0.7426160337552743,\n \"acc_norm_stderr\": 0.028458820991460295\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n\ \ \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n\ \ \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n\ \ \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7107438016528925,\n \"acc_stderr\": 0.04139112727635463,\n \"\ acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.04139112727635463\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n\ \ \"acc_stderr\": 0.04453197507374983,\n \"acc_norm\": 0.6944444444444444,\n\ \ \"acc_norm_stderr\": 0.04453197507374983\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6380368098159509,\n \"acc_stderr\": 0.037757007291414416,\n\ \ \"acc_norm\": 0.6380368098159509,\n \"acc_norm_stderr\": 0.037757007291414416\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n\ \ \"acc_stderr\": 0.04246624336697625,\n \"acc_norm\": 0.2767857142857143,\n\ \ \"acc_norm_stderr\": 0.04246624336697625\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n\ \ \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7991452991452992,\n\ \ \"acc_stderr\": 0.026246772946890467,\n \"acc_norm\": 0.7991452991452992,\n\ \ \"acc_norm_stderr\": 
0.026246772946890467\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \ \ \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7496807151979565,\n\ \ \"acc_stderr\": 0.015491088951494569,\n \"acc_norm\": 0.7496807151979565,\n\ \ \"acc_norm_stderr\": 0.015491088951494569\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.025906632631016127,\n\ \ \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.025906632631016127\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.38100558659217876,\n\ \ \"acc_stderr\": 0.016242028834053613,\n \"acc_norm\": 0.38100558659217876,\n\ \ \"acc_norm_stderr\": 0.016242028834053613\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n\ \ \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6366559485530546,\n\ \ \"acc_stderr\": 0.027316847674192714,\n \"acc_norm\": 0.6366559485530546,\n\ \ \"acc_norm_stderr\": 0.027316847674192714\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.026725868809100793,\n\ \ \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.026725868809100793\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.425531914893617,\n \"acc_stderr\": 0.02949482760014438,\n \ \ \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.02949482760014438\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44589308996088656,\n\ \ \"acc_stderr\": 0.012695244711379776,\n \"acc_norm\": 0.44589308996088656,\n\ \ \"acc_norm_stderr\": 0.012695244711379776\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.03016191193076711,\n\ \ \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.03016191193076711\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5473856209150327,\n \"acc_stderr\": 0.020136790918492523,\n \ \ \"acc_norm\": 0.5473856209150327,\n \"acc_norm_stderr\": 0.020136790918492523\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\ \ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\ \ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6285714285714286,\n \"acc_stderr\": 0.03093285879278985,\n\ \ \"acc_norm\": 0.6285714285714286,\n \"acc_norm_stderr\": 0.03093285879278985\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.736318407960199,\n\ \ \"acc_stderr\": 0.031157150869355558,\n \"acc_norm\": 0.736318407960199,\n\ \ \"acc_norm_stderr\": 0.031157150869355558\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \ \ \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n\ \ \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.4939759036144578,\n\ \ \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338734,\n\ \ \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 
0.03158149539338734\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3243574051407589,\n\ \ \"mc1_stderr\": 0.016387976779647935,\n \"mc2\": 0.44868368066946906,\n\ \ \"mc2_stderr\": 0.015140951474620613\n }\n}\n```" repo_url: https://huggingface.co/SLAM-group/NewHope leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|arc:challenge|25_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hellaswag|10_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T16:20:26.294433.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T16:20:26.294433.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-02T16:20:26.294433.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-02T16:20:26.294433.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-management|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T16:20:26.294433.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-02T16:20:26.294433.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_02T16_20_26.294433 path: - '**/details_harness|truthfulqa:mc|0_2023-08-02T16:20:26.294433.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-02T16:20:26.294433.parquet' - config_name: results data_files: - split: 2023_08_02T16_20_26.294433 path: - results_2023-08-02T16:20:26.294433.parquet - split: latest path: - results_2023-08-02T16:20:26.294433.parquet --- # Dataset Card for Evaluation run of SLAM-group/NewHope ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/SLAM-group/NewHope - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [SLAM-group/NewHope](https://huggingface.co/SLAM-group/NewHope) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_SLAM-group__NewHope", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-02T16:20:26.294433](https://huggingface.co/datasets/open-llm-leaderboard/details_SLAM-group__NewHope/blob/main/results_2023-08-02T16%3A20%3A26.294433.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5588691829632426, "acc_stderr": 0.03433115773924322, "acc_norm": 0.5628652703397449, "acc_norm_stderr": 0.03430877590228174, "mc1": 0.3243574051407589, "mc1_stderr": 0.016387976779647935, "mc2": 0.44868368066946906, "mc2_stderr": 0.015140951474620613 }, "harness|arc:challenge|25": { "acc": 0.5767918088737202, "acc_stderr": 0.014438036220848022, "acc_norm": 0.6092150170648464, "acc_norm_stderr": 0.014258563880513782 }, "harness|hellaswag|10": { "acc": 0.6366261700856403, "acc_stderr": 0.004799882248494812, "acc_norm": 0.8399721171081458, "acc_norm_stderr": 0.003658826208101608 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5328947368421053, "acc_stderr": 0.040601270352363966, "acc_norm": 0.5328947368421053, "acc_norm_stderr": 0.040601270352363966 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5811320754716981, "acc_stderr": 0.03036505082911521, "acc_norm": 0.5811320754716981, "acc_norm_stderr": 0.03036505082911521 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5763888888888888, "acc_stderr": 0.04132125019723369, "acc_norm": 0.5763888888888888, "acc_norm_stderr": 0.04132125019723369 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5260115606936416, "acc_stderr": 0.038073017265045125, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.038073017265045125 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.0433643270799318, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.0433643270799318 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4595744680851064, "acc_stderr": 0.03257901482099835, "acc_norm": 0.4595744680851064, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489361, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489361 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4689655172413793, "acc_stderr": 0.04158632762097828, "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.023919984164047732, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.023919984164047732 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.042857142857142816, 
"acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.042857142857142816 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6870967741935484, "acc_stderr": 0.02637756702864586, "acc_norm": 0.6870967741935484, "acc_norm_stderr": 0.02637756702864586 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.39901477832512317, "acc_stderr": 0.03445487686264715, "acc_norm": 0.39901477832512317, "acc_norm_stderr": 0.03445487686264715 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6606060606060606, "acc_stderr": 0.03697442205031595, "acc_norm": 0.6606060606060606, "acc_norm_stderr": 0.03697442205031595 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7070707070707071, "acc_stderr": 0.032424979581788166, "acc_norm": 0.7070707070707071, "acc_norm_stderr": 0.032424979581788166 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8341968911917098, "acc_stderr": 0.026839845022314415, "acc_norm": 0.8341968911917098, "acc_norm_stderr": 0.026839845022314415 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5256410256410257, "acc_stderr": 0.025317649726448656, "acc_norm": 0.5256410256410257, "acc_norm_stderr": 0.025317649726448656 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.02857834836547307, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.02857834836547307 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6134453781512605, "acc_stderr": 0.03163145807552379, "acc_norm": 0.6134453781512605, "acc_norm_stderr": 0.03163145807552379 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658753, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658753 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7211009174311926, "acc_stderr": 0.01922746887646351, "acc_norm": 0.7211009174311926, "acc_norm_stderr": 0.01922746887646351 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4583333333333333, "acc_stderr": 0.03398110890294636, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.029331162294251735, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.029331162294251735 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7426160337552743, "acc_stderr": 0.028458820991460295, "acc_norm": 0.7426160337552743, "acc_norm_stderr": 0.028458820991460295 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.042438692422305246, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.042438692422305246 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.04139112727635463, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.04139112727635463 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.04453197507374983, "acc_norm": 0.6944444444444444, 
"acc_norm_stderr": 0.04453197507374983 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6380368098159509, "acc_stderr": 0.037757007291414416, "acc_norm": 0.6380368098159509, "acc_norm_stderr": 0.037757007291414416 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697625, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697625 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7991452991452992, "acc_stderr": 0.026246772946890467, "acc_norm": 0.7991452991452992, "acc_norm_stderr": 0.026246772946890467 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7496807151979565, "acc_stderr": 0.015491088951494569, "acc_norm": 0.7496807151979565, "acc_norm_stderr": 0.015491088951494569 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6358381502890174, "acc_stderr": 0.025906632631016127, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.025906632631016127 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.38100558659217876, "acc_stderr": 0.016242028834053613, "acc_norm": 0.38100558659217876, "acc_norm_stderr": 0.016242028834053613 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6405228758169934, "acc_stderr": 0.027475969910660952, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.027475969910660952 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6366559485530546, "acc_stderr": 0.027316847674192714, "acc_norm": 0.6366559485530546, "acc_norm_stderr": 0.027316847674192714 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6388888888888888, "acc_stderr": 0.026725868809100793, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.026725868809100793 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.425531914893617, "acc_stderr": 0.02949482760014438, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.02949482760014438 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.44589308996088656, "acc_stderr": 0.012695244711379776, "acc_norm": 0.44589308996088656, "acc_norm_stderr": 0.012695244711379776 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5588235294117647, "acc_stderr": 0.03016191193076711, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.03016191193076711 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5473856209150327, "acc_stderr": 0.020136790918492523, "acc_norm": 0.5473856209150327, "acc_norm_stderr": 0.020136790918492523 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6285714285714286, "acc_stderr": 0.03093285879278985, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.03093285879278985 }, "harness|hendrycksTest-sociology|5": { "acc": 0.736318407960199, "acc_stderr": 0.031157150869355558, "acc_norm": 0.736318407960199, "acc_norm_stderr": 0.031157150869355558 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333045, 
"acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.03158149539338734, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.03158149539338734 }, "harness|truthfulqa:mc|0": { "mc1": 0.3243574051407589, "mc1_stderr": 0.016387976779647935, "mc2": 0.44868368066946906, "mc2_stderr": 0.015140951474620613 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7367315888404846, -0.8493001461029053, 0.2831045389175415, 0.20268382132053375, -0.15679490566253662, -0.036914143711328506, 0.02442084066569805, -0.1877482831478119, 0.613854706287384, -0.07538760453462601, -0.4644000828266144, -0.7376779317855835, -0.5462169647216797, 0.2573646903038025, -0.019945917651057243, 0.8537829518318176, -0.1988348513841629, -0.20094996690750122, 0.12237906455993652, -0.0728374570608139, -0.25302985310554504, -0.29886558651924133, -0.49703189730644226, -0.34593749046325684, 0.16993999481201172, 0.4238528311252594, 0.4970654249191284, 0.7915019392967224, 0.7262222766876221, 0.29474255442619324, -0.31848254799842834, -0.017857255414128304, -0.18613934516906738, -0.31223854422569275, 0.4072951078414917, -0.3298172056674957, -0.877292275428772, 0.3515736758708954, 0.7895904779434204, 0.6530494689941406, -0.05551787093281746, 0.3096403181552887, 0.0048740217462182045, 0.5450159311294556, -0.39722710847854614, 0.04863357916474342, -0.2944924533367157, 0.26491665840148926, -0.20539920032024384, -0.3536894619464874, -0.3147372305393219, -0.24833551049232483, -0.10935524851083755, -0.9181293249130249, 0.2974388599395752, 0.34227100014686584, 1.575736403465271, -0.1138056069612503, -0.2548180818557739, 0.12745124101638794, -0.08766230195760727, 1.0058729648590088, -0.8472684621810913, 0.3599494397640228, 0.814310610294342, 0.17427115142345428, -0.21140708029270172, -0.5226297378540039, -0.6286605000495911, 0.09878213703632355, -0.32773539423942566, 0.3743123412132263, -0.05379137769341469, -0.20322288572788239, 0.3855418264865875, 0.6791641712188721, -0.6810972690582275, 0.127582386136055, -0.6629920601844788, -0.1591801643371582, 1.0860918760299683, 0.30861401557922363, 0.06923635303974152, -0.3748345673084259, -0.6978646516799927, -0.6634863615036011, -0.40515148639678955, 0.23429641127586365, 0.4188050627708435, 0.37192654609680176, -0.4301535189151764, 0.6932563781738281, -0.4376077353954315, 0.573634684085846, 0.4042295515537262, -0.047432348132133484, 0.9350442886352539, -0.6187946200370789, -0.5325952768325806, -0.05512756481766701, 1.092841625213623, 0.6307535171508789, 0.037770211696624756, 0.20697084069252014, 0.05223073065280914, -0.08323434740304947, 0.021279027685523033, -0.7991109490394592, -0.3100462853908539, 0.11482878029346466, -0.4046345353126526, -0.4644245207309723, 0.34894388914108276, -0.8840593099594116, 0.11511813849210739, -0.015059009194374084, 0.45193809270858765, -0.49050819873809814, -0.12019291520118713, 0.2934679090976715, -0.46692195534706116, 0.8362706303596497, -0.15660955011844635, -0.7440679669380188, 0.3953329026699066, 0.48528632521629333, 0.7769792079925537, -0.10560857504606247, -0.45547765493392944, -0.12961691617965698, -0.05784725770354271, -0.2343699187040329, 0.556376576423645, -0.30337023735046387, -0.44869616627693176, -0.32156217098236084, 0.3021656572818756, -0.24688154458999634, -0.33186113834381104, 0.7463101148605347, -0.2113138884305954, 0.2251880168914795, -0.4162502586841583, -0.6576282978057861, 0.11430104821920395, 0.3981621265411377, -0.42591866850852966, 1.2922580242156982, 0.277316689491272, -0.8072407841682434, 0.4696941375732422, -0.6116010546684265, -0.19121859967708588, -0.0046234107576310635, -0.04783516749739647, -0.7815495133399963, -0.2306891232728958, 0.19749335944652557, 0.40284422039985657, -0.1675080806016922, -0.1407490223646164, -0.38917723298072815, -0.34631434082984924, 0.34999895095825195, -0.17594797909259796, 1.1973284482955933, -0.027328480035066605, -0.7633563876152039, 
-0.1468198448419571, -1.24016273021698, 0.28946223855018616, 0.20207636058330536, -0.3584885001182556, -0.17085057497024536, -0.4700770378112793, -0.06113675236701965, 0.17785072326660156, 0.25799253582954407, -0.8344691395759583, 0.31148943305015564, -0.32637128233909607, 0.10145280510187149, 1.2339708805084229, 0.020757639780640602, 0.14916925132274628, -0.5555233955383301, 0.5541570782661438, 0.1718297153711319, 0.20946857333183289, 0.4046064019203186, -0.6384606957435608, -0.7537441253662109, -0.48423847556114197, -0.08332565426826477, 0.5673931241035461, -0.1661672294139862, 1.0954960584640503, 0.02190292440354824, -0.8753644227981567, -0.47870153188705444, -0.12431040406227112, 0.4758366644382477, 0.7873314619064331, 0.6342231035232544, -0.08724581450223923, -0.6426252126693726, -1.0547375679016113, -0.2754158675670624, -0.12083544582128525, 0.16211847960948944, 0.21367090940475464, 0.9842286109924316, -0.2647601366043091, 0.603309154510498, -1.0686031579971313, -0.20788481831550598, 0.15457478165626526, -0.03852471709251404, 0.779664158821106, 0.7102578282356262, 0.5528093576431274, -0.689717173576355, -0.5227077603340149, 0.1714671105146408, -0.8913447856903076, -0.0938698798418045, 0.17283056676387787, -0.3211497366428375, 0.11787555366754532, 0.10880935937166214, -0.6666433811187744, 0.5715171694755554, 0.23812182247638702, -1.0792734622955322, 1.0284302234649658, -0.34008291363716125, 0.5803214907646179, -1.0173289775848389, 0.12476417422294617, -0.06385685503482819, 0.05614091828465462, -0.4330269992351532, 0.025883935391902924, 0.049632612615823746, 0.455304354429245, -0.5055258274078369, 0.8072167634963989, -0.7070143818855286, -0.08008408546447754, 0.3766787052154541, 0.17404401302337646, -0.1060270294547081, 0.31523987650871277, -0.23545314371585846, 0.7618677020072937, 0.7676048874855042, -0.47519952058792114, 0.5411892533302307, 0.44864732027053833, -0.21136845648288727, 0.7281694412231445, -0.46629902720451355, -0.3443589508533478, 0.2899586856365204, -0.022239912301301956, -0.785362720489502, -0.5277029275894165, 0.0021448913030326366, -0.620307445526123, -0.07498869299888611, 0.3411668837070465, -0.23203809559345245, -0.8371805548667908, -0.927807629108429, 0.37978696823120117, 0.7242241501808167, -0.3916647732257843, -0.22033965587615967, 0.12854960560798645, 0.12411150336265564, -0.8004966974258423, -0.8128378391265869, -0.4699266254901886, -0.20932906866073608, -0.6892876625061035, 0.29709574580192566, -0.24557356536388397, -0.24387575685977936, -0.09202351421117783, -0.21339426934719086, -0.31829309463500977, 0.007099644746631384, 0.14273402094841003, 0.662530779838562, -0.4124698042869568, -0.3222604990005493, -0.27793028950691223, -0.24744614958763123, 0.20205263793468475, -0.10244458168745041, 0.3783774673938751, -0.456136018037796, -0.4452746510505676, -0.4245281517505646, -0.007067686412483454, 0.6977099776268005, -0.0967116430401802, 0.7236964702606201, 0.4595770835876465, -0.2873733341693878, 0.03826996311545372, -0.31041815876960754, -0.2513810098171234, -0.5766133069992065, 0.2610514760017395, -0.4988408386707306, -1.0865206718444824, 0.743967592716217, 0.48743391036987305, 0.027242369949817657, 1.1406973600387573, 0.6127464771270752, -0.2839542329311371, 0.9993716478347778, 0.037976909428834915, 0.30399826169013977, 0.317163348197937, -0.6674449443817139, 0.14449505507946014, -0.9884390234947205, -0.2945100963115692, -0.5332338809967041, -0.5346201062202454, -0.7410103678703308, -0.11414925009012222, 0.3019627332687378, 0.1040988340973854, 
-0.6628645658493042, 0.5935235619544983, -0.8648797273635864, 0.6107888221740723, 0.5576054453849792, 0.22344769537448883, 0.13839316368103027, -0.20617324113845825, -0.3546437919139862, -0.13548234105110168, -0.4430077075958252, -0.25874727964401245, 1.2685303688049316, 0.2574257552623749, 0.76059490442276, 0.09412896633148193, 0.9083322286605835, 0.07669896632432938, -0.09884973615407944, -0.5493941903114319, 0.6411697864532471, 0.12183098495006561, -0.7992187738418579, -0.4067433178424835, -0.4980465769767761, -1.1166330575942993, 0.39203667640686035, -0.162098690867424, -0.8604826331138611, 0.06912526488304138, 0.020798588171601295, -0.23166806995868683, 0.5364484190940857, -0.5427276492118835, 0.8192451000213623, -0.10336413979530334, -0.45940953493118286, 0.16458162665367126, -0.859789252281189, 0.46198150515556335, 0.23943886160850525, 0.2560507357120514, -0.0389624685049057, 0.3155694901943207, 1.1638301610946655, -0.7499008178710938, 0.3989298343658447, 0.07027538865804672, 0.05454714596271515, 0.31204408407211304, -0.15692052245140076, 0.48781850934028625, 0.08864080160856247, -0.010288713499903679, -0.13926880061626434, 0.2839902937412262, -0.8766704797744751, -0.09204866737127304, 0.9029344916343689, -0.9584002494812012, -0.6189945340156555, -0.9069904088973999, -0.5869765281677246, 0.11127019673585892, 0.5494576096534729, 0.39087095856666565, 0.5397390723228455, 0.0337648019194603, 0.4639962315559387, 0.8355679512023926, -0.14093460142612457, 0.5603997707366943, 0.25234153866767883, 0.09522795677185059, -0.6537416577339172, 0.861045241355896, 0.08011845499277115, 0.35885196924209595, 0.29099106788635254, 0.38882094621658325, -0.5381637215614319, -0.2312605082988739, -0.1618042141199112, 0.4915793538093567, -0.5857282876968384, -0.27210283279418945, -0.3579394221305847, -0.3499510884284973, -0.8453078269958496, -0.6655411720275879, -0.3341682255268097, -0.5176548361778259, -0.4232187271118164, -0.49233752489089966, 0.5912260413169861, 0.48549261689186096, -0.36624130606651306, 0.04680963233113289, -0.5090457797050476, 0.2678738534450531, 0.2732156813144684, 0.537876307964325, -0.4509756565093994, -0.5645954012870789, 0.06111954152584076, -0.140205979347229, -0.6278547644615173, -0.9714831709861755, 0.3313666582107544, -0.03592897951602936, 0.5065160989761353, 0.625346302986145, 0.1069616973400116, 0.8463159799575806, -0.23253832757472992, 1.0463740825653076, 0.3156518340110779, -0.753811776638031, 0.7717339992523193, -0.3361070454120636, 0.2044079601764679, 0.6665526628494263, 0.18360090255737305, -0.2121639847755432, -0.7322878837585449, -1.3332277536392212, -0.7940535545349121, 0.6100779175758362, 0.38564303517341614, -0.28232210874557495, 0.00474747596308589, 0.16940808296203613, -0.2743120491504669, -0.18465329706668854, -0.7012680768966675, -0.8829677700996399, -0.1448550522327423, -0.506287157535553, 0.0804445818066597, 0.016077054664492607, -0.41428714990615845, -0.8290777802467346, 0.9044795632362366, -0.011495529673993587, 0.6233266592025757, 0.4441606402397156, 0.07357456535100937, 0.08664305508136749, 0.4858202040195465, 0.9635220170021057, 0.7302790284156799, -0.46220555901527405, 0.4647594094276428, 0.44785112142562866, -1.0303611755371094, 0.4905816614627838, 0.30294960737228394, -0.06642033904790878, -0.0442713238298893, 0.4825640320777893, 0.4340094327926636, 0.08514285087585449, -0.15578831732273102, 0.6410198211669922, 0.014756436459720135, -0.5555146336555481, -0.3912256360054016, 0.10458783060312271, -0.1920924186706543, -0.007881931029260159, 
0.37775683403015137, -0.1618890017271042, -0.02108042873442173, -0.4570466876029968, 0.47033771872520447, 0.39395859837532043, -0.44119992852211, -0.18976889550685883, 0.7069414258003235, -0.20204250514507294, -0.14368534088134766, 0.3058463931083679, -0.24704954028129578, -0.6422162652015686, 1.0407555103302002, 0.6189178228378296, 0.676995575428009, -0.27037328481674194, -0.046610377728939056, 0.9146925210952759, 0.40162280201911926, -0.006902461405843496, 0.5061578154563904, 0.3120488226413727, -0.24548223614692688, 0.16859140992164612, -0.894381582736969, -0.053149666637182236, 0.1128740906715393, -0.8387954235076904, 0.3261752724647522, -0.4944332242012024, -0.19500339031219482, -0.05231860280036926, 0.41257843375205994, -0.4455394744873047, 0.5597032308578491, -0.4140998125076294, 1.2089282274246216, -0.982661783695221, 0.6991711854934692, 0.7502647042274475, -0.5367158055305481, -1.0227166414260864, -0.5481425523757935, 0.05759497731924057, -0.7940370440483093, 0.5702709555625916, -0.03657917678356171, 0.18821248412132263, -0.10614355653524399, -0.6965962052345276, -0.9624500274658203, 1.3966904878616333, -0.049336690455675125, -0.3434257209300995, 0.2240518033504486, -0.029024789109826088, 0.4106701612472534, 0.1573123186826706, 0.5927661061286926, 0.7368165850639343, 0.8352332711219788, -0.05401824787259102, -0.7398397922515869, 0.3294878304004669, -0.5166687965393066, -0.35278934240341187, 0.5127806067466736, -0.9112218022346497, 1.2166857719421387, 0.01745731569826603, 0.22645032405853271, -0.240930438041687, 0.6774908304214478, 0.788722574710846, 0.3268318176269531, 0.3369302451610565, 0.9603754878044128, 0.8656825423240662, -0.5257960557937622, 1.0572017431259155, -0.20974202454090118, 0.8707023859024048, 0.7401993274688721, 0.1976117342710495, 0.7702911496162415, 0.6703770756721497, -0.5573808550834656, 0.5418029427528381, 0.7758339047431946, -0.3232603967189789, 0.38375124335289, 0.2690827548503876, -0.14865469932556152, -0.11586623638868332, 0.45793986320495605, -0.9024614691734314, 0.16515770554542542, 0.03757843002676964, -0.3026605248451233, 0.05477340519428253, -0.41158729791641235, 0.32575762271881104, -0.08951980620622635, 0.03227715194225311, 0.35306695103645325, 0.0023814314045011997, -0.4108259975910187, 0.9139130711555481, -0.16677328944206238, 0.7520908117294312, -0.5256914496421814, -0.06670647114515305, -0.33257347345352173, 0.6118835210800171, -0.4271889925003052, -1.046720027923584, 0.19415341317653656, 0.03845364227890968, -0.14503349363803864, -0.1656101942062378, 0.6530458331108093, -0.1749548614025116, -0.78116375207901, 0.1198018491268158, 0.022621404379606247, 0.14277614653110504, 0.5294621586799622, -0.6301463842391968, -0.3050701916217804, -0.08207011222839355, -0.5144214034080505, 0.11529166996479034, 0.31932902336120605, 0.2945139706134796, 0.5427520275115967, 0.6269352436065674, 0.18471412360668182, 0.48326730728149414, -0.5739301443099976, 0.7607851624488831, -1.0608999729156494, -0.699940025806427, -0.8867939114570618, 0.41867196559906006, -0.32249656319618225, -0.909600555896759, 1.0657061338424683, 1.0573993921279907, 0.8927528262138367, 0.046133194118738174, 0.6115896701812744, -0.36641189455986023, 0.2769107520580292, -0.3749352693557739, 0.9474982619285583, -0.8538925051689148, -0.25687146186828613, -0.3103652596473694, -0.6992588043212891, -0.4259208142757416, 0.8676832318305969, -0.1879519373178482, 0.03916556015610695, 1.0623689889907837, 0.6504720449447632, -0.10712199658155441, 0.07724123448133469, -0.04722430184483528, 
0.5764791965484619, 0.4122282564640045, 1.0135055780410767, 0.6142019629478455, -0.7974318861961365, 0.2790622413158417, -0.47986483573913574, -0.454556405544281, -0.39801889657974243, -0.48433077335357666, -0.8078066110610962, -0.5030471682548523, -0.2566603422164917, -0.5953956842422485, -0.13013850152492523, 1.0269501209259033, 0.44999176263809204, -0.9084966778755188, -0.4025091230869293, -0.08145888894796371, 0.1490466147661209, -0.5654665231704712, -0.4140934348106384, 0.728388786315918, -0.14685210585594177, -0.5535840392112732, 0.21498160064220428, -0.09088537096977234, 0.23926852643489838, 0.13545580208301544, -0.4259817600250244, -0.7473179697990417, -0.024739276617765427, 0.4525832235813141, 0.34956595301628113, -0.704176664352417, -0.7456908822059631, 0.28057393431663513, -0.48469045758247375, 0.42701324820518494, -0.027783527970314026, -0.513927161693573, 0.09322886914014816, 0.649509608745575, 0.5060103535652161, 0.6599985361099243, -0.07769530266523361, 0.08988546580076218, -0.6623793244361877, 0.21374203264713287, -0.015754904597997665, 0.29289260506629944, -0.0331076979637146, -0.2730896472930908, 0.7694723606109619, 0.688797652721405, -0.5505868196487427, -1.0737391710281372, -0.4317634105682373, -1.4382556676864624, 0.05008509010076523, 1.1234959363937378, 0.010494186542928219, -0.5111022591590881, 0.22715915739536285, -0.146405890583992, 0.21579186618328094, -0.2927032709121704, 0.780113160610199, 0.8090335726737976, -0.3889710009098053, 0.11441516131162643, -0.6500831842422485, 0.3691212832927704, 0.5258589386940002, -1.1744942665100098, -0.13365858793258667, 0.23401428759098053, 0.27936992049217224, 0.37542203068733215, 0.6316705942153931, -0.12009856849908829, 0.2738843858242035, 0.21031217277050018, 0.046519163995981216, 0.014464862644672394, 0.033816952258348465, -0.18705140054225922, 0.10585261136293411, -0.25985005497932434, -0.48260945081710815 ]
open-llm-leaderboard/details_timdettmers__guanaco-33b-merged
open-llm-leaderboard
2023-08-27T12:34:28Z
201
0
[ "region:us" ]
null
2023-08-18T11:30:37Z
--- pretty_name: Evaluation run of timdettmers/guanaco-33b-merged dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [timdettmers/guanaco-33b-merged](https://huggingface.co/timdettmers/guanaco-33b-merged)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_timdettmers__guanaco-33b-merged\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-20T14:01:44.026263](https://huggingface.co/datasets/open-llm-leaderboard/details_timdettmers__guanaco-33b-merged/blob/main/results_2023-07-20T14%3A01%3A44.026263.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5404553018205109,\n\ \ \"acc_stderr\": 0.03488622237927161,\n \"acc_norm\": 0.5444824613318672,\n\ \ \"acc_norm_stderr\": 0.03486249375448495,\n \"mc1\": 0.34761321909424725,\n\ \ \"mc1_stderr\": 0.016670769188897306,\n \"mc2\": 0.5121992740888713,\n\ \ \"mc2_stderr\": 0.014650490351006002\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5870307167235495,\n \"acc_stderr\": 0.014388344935398326,\n\ \ \"acc_norm\": 0.6245733788395904,\n \"acc_norm_stderr\": 0.014150631435111726\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6446922923720374,\n\ \ \"acc_stderr\": 0.004776283203468098,\n \"acc_norm\": 0.8447520414260108,\n\ \ \"acc_norm_stderr\": 0.003614007841341989\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n\ \ \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n\ \ \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.04033565667848319,\n\ \ \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.04033565667848319\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5622641509433962,\n \"acc_stderr\": 0.030533338430467516,\n\ \ \"acc_norm\": 0.5622641509433962,\n \"acc_norm_stderr\": 0.030533338430467516\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5625,\n\ \ \"acc_stderr\": 0.04148415739394154,\n \"acc_norm\": 0.5625,\n \ \ \"acc_norm_stderr\": 0.04148415739394154\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \ \ \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\"\ : 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5144508670520231,\n\ \ \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.5144508670520231,\n\ \ \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383889,\n\ \ \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383889\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n\ \ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4595744680851064,\n \"acc_stderr\": 0.03257901482099835,\n\ \ \"acc_norm\": 0.4595744680851064,\n \"acc_norm_stderr\": 0.03257901482099835\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3684210526315789,\n\ \ \"acc_stderr\": 0.04537815354939391,\n \"acc_norm\": 0.3684210526315789,\n\ \ \"acc_norm_stderr\": 0.04537815354939391\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4413793103448276,\n \"acc_stderr\": 0.04137931034482758,\n\ \ \"acc_norm\": 0.4413793103448276,\n \"acc_norm_stderr\": 0.04137931034482758\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.31216931216931215,\n \"acc_stderr\": 0.0238652068369726,\n \"\ acc_norm\": 0.31216931216931215,\n \"acc_norm_stderr\": 0.0238652068369726\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.29365079365079366,\n\ \ \"acc_stderr\": 0.04073524322147126,\n \"acc_norm\": 0.29365079365079366,\n\ \ \"acc_norm_stderr\": 0.04073524322147126\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.6290322580645161,\n \"acc_stderr\": 0.027480541887953593,\n \"\ acc_norm\": 0.6290322580645161,\n \"acc_norm_stderr\": 0.027480541887953593\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.3793103448275862,\n \"acc_stderr\": 0.03413963805906235,\n \"\ acc_norm\": 0.3793103448275862,\n \"acc_norm_stderr\": 0.03413963805906235\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\"\ : 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.035014387062967806,\n\ \ \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.035014387062967806\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7222222222222222,\n \"acc_stderr\": 0.03191178226713547,\n \"\ acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.03191178226713547\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ 
\"acc\": 0.7150259067357513,\n \"acc_stderr\": 0.032577140777096614,\n\ \ \"acc_norm\": 0.7150259067357513,\n \"acc_norm_stderr\": 0.032577140777096614\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.49230769230769234,\n \"acc_stderr\": 0.025348006031534778,\n\ \ \"acc_norm\": 0.49230769230769234,\n \"acc_norm_stderr\": 0.025348006031534778\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02730914058823019,\n \ \ \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02730914058823019\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.03242225027115006,\n \ \ \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.03242225027115006\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658752,\n \"\ acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658752\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7302752293577982,\n \"acc_stderr\": 0.019028486711115438,\n \"\ acc_norm\": 0.7302752293577982,\n \"acc_norm_stderr\": 0.019028486711115438\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.375,\n \"acc_stderr\": 0.033016908987210894,\n \"acc_norm\": 0.375,\n\ \ \"acc_norm_stderr\": 0.033016908987210894\n },\n \"harness|hendrycksTest-high_school_us_history|5\"\ : {\n \"acc\": 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967409,\n\ \ \"acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967409\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7679324894514767,\n \"acc_stderr\": 0.02747974455080851,\n \ \ \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.02747974455080851\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5874439461883408,\n\ \ \"acc_stderr\": 0.03304062175449297,\n \"acc_norm\": 0.5874439461883408,\n\ \ \"acc_norm_stderr\": 0.03304062175449297\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6412213740458015,\n \"acc_stderr\": 0.04206739313864908,\n\ \ \"acc_norm\": 0.6412213740458015,\n \"acc_norm_stderr\": 0.04206739313864908\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908705,\n \"\ acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908705\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6111111111111112,\n\ \ \"acc_stderr\": 0.04712821257426769,\n \"acc_norm\": 0.6111111111111112,\n\ \ \"acc_norm_stderr\": 0.04712821257426769\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6380368098159509,\n \"acc_stderr\": 0.037757007291414416,\n\ \ \"acc_norm\": 0.6380368098159509,\n \"acc_norm_stderr\": 0.037757007291414416\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n\ \ \"acc_stderr\": 0.04616143075028546,\n \"acc_norm\": 0.38392857142857145,\n\ \ \"acc_norm_stderr\": 0.04616143075028546\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6310679611650486,\n \"acc_stderr\": 0.0477761518115674,\n\ \ \"acc_norm\": 0.6310679611650486,\n \"acc_norm_stderr\": 0.0477761518115674\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7863247863247863,\n\ \ \"acc_stderr\": 0.026853450377009154,\n \"acc_norm\": 0.7863247863247863,\n\ \ \"acc_norm_stderr\": 
0.026853450377009154\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6756066411238825,\n\ \ \"acc_stderr\": 0.0167409290471627,\n \"acc_norm\": 0.6756066411238825,\n\ \ \"acc_norm_stderr\": 0.0167409290471627\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5491329479768786,\n \"acc_stderr\": 0.026788811931562757,\n\ \ \"acc_norm\": 0.5491329479768786,\n \"acc_norm_stderr\": 0.026788811931562757\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2569832402234637,\n\ \ \"acc_stderr\": 0.01461446582196632,\n \"acc_norm\": 0.2569832402234637,\n\ \ \"acc_norm_stderr\": 0.01461446582196632\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5718954248366013,\n \"acc_stderr\": 0.028332397483664278,\n\ \ \"acc_norm\": 0.5718954248366013,\n \"acc_norm_stderr\": 0.028332397483664278\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6012861736334405,\n\ \ \"acc_stderr\": 0.027809322585774496,\n \"acc_norm\": 0.6012861736334405,\n\ \ \"acc_norm_stderr\": 0.027809322585774496\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6172839506172839,\n \"acc_stderr\": 0.027044538138402595,\n\ \ \"acc_norm\": 0.6172839506172839,\n \"acc_norm_stderr\": 0.027044538138402595\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.42907801418439717,\n \"acc_stderr\": 0.029525914302558555,\n \ \ \"acc_norm\": 0.42907801418439717,\n \"acc_norm_stderr\": 0.029525914302558555\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.424380704041721,\n\ \ \"acc_stderr\": 0.01262334375743002,\n \"acc_norm\": 0.424380704041721,\n\ \ \"acc_norm_stderr\": 0.01262334375743002\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5808823529411765,\n \"acc_stderr\": 0.02997280717046462,\n\ \ \"acc_norm\": 0.5808823529411765,\n \"acc_norm_stderr\": 0.02997280717046462\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5212418300653595,\n \"acc_stderr\": 0.020209572388600248,\n \ \ \"acc_norm\": 0.5212418300653595,\n \"acc_norm_stderr\": 0.020209572388600248\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n\ \ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n\ \ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5918367346938775,\n \"acc_stderr\": 0.03146465712827424,\n\ \ \"acc_norm\": 0.5918367346938775,\n \"acc_norm_stderr\": 0.03146465712827424\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6915422885572139,\n\ \ \"acc_stderr\": 0.032658195885126966,\n \"acc_norm\": 0.6915422885572139,\n\ \ \"acc_norm_stderr\": 0.032658195885126966\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.78,\n \"acc_stderr\": 0.041633319989322626,\n \ \ \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.041633319989322626\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n\ \ \"acc_stderr\": 0.038786267710023595,\n \"acc_norm\": 0.4578313253012048,\n\ \ \"acc_norm_stderr\": 0.038786267710023595\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7251461988304093,\n \"acc_stderr\": 0.03424042924691584,\n\ \ \"acc_norm\": 0.7251461988304093,\n \"acc_norm_stderr\": 
0.03424042924691584\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.34761321909424725,\n\ \ \"mc1_stderr\": 0.016670769188897306,\n \"mc2\": 0.5121992740888713,\n\ \ \"mc2_stderr\": 0.014650490351006002\n }\n}\n```" repo_url: https://huggingface.co/timdettmers/guanaco-33b-merged leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|arc:challenge|25_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hellaswag|10_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-20T14:01:44.026263.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-20T14:01:44.026263.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-20T14:01:44.026263.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-20T14:01:44.026263.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-20T14:01:44.026263.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-management|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-20T14:01:44.026263.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_20T14_01_44.026263 path: - '**/details_harness|truthfulqa:mc|0_2023-07-20T14:01:44.026263.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-20T14:01:44.026263.parquet' - config_name: results data_files: - split: 2023_07_20T14_01_44.026263 path: - results_2023-07-20T14:01:44.026263.parquet - split: latest path: - results_2023-07-20T14:01:44.026263.parquet --- # Dataset Card for Evaluation run of timdettmers/guanaco-33b-merged ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/timdettmers/guanaco-33b-merged - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [timdettmers/guanaco-33b-merged](https://huggingface.co/timdettmers/guanaco-33b-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
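For instance, to check which configurations are available before loading anything, something along these lines should work (a sketch, not part of the original card: it assumes the `datasets` library is installed and reuses the repository id referenced above; loading a single task is shown in the snippet just below):

```python
# Sketch: enumerate the 61 configurations described above (one per evaluated task, plus "results").
# Assumes the `datasets` library is installed; the repository id is the one this card documents.
from datasets import get_dataset_config_names

configs = get_dataset_config_names("open-llm-leaderboard/details_timdettmers__guanaco-33b-merged")
print(len(configs))   # expected to match the 61 configurations mentioned above
print(configs[:5])    # a few of the harness_* task configurations
```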
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_timdettmers__guanaco-33b-merged", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-20T14:01:44.026263](https://huggingface.co/datasets/open-llm-leaderboard/details_timdettmers__guanaco-33b-merged/blob/main/results_2023-07-20T14%3A01%3A44.026263.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5404553018205109, "acc_stderr": 0.03488622237927161, "acc_norm": 0.5444824613318672, "acc_norm_stderr": 0.03486249375448495, "mc1": 0.34761321909424725, "mc1_stderr": 0.016670769188897306, "mc2": 0.5121992740888713, "mc2_stderr": 0.014650490351006002 }, "harness|arc:challenge|25": { "acc": 0.5870307167235495, "acc_stderr": 0.014388344935398326, "acc_norm": 0.6245733788395904, "acc_norm_stderr": 0.014150631435111726 }, "harness|hellaswag|10": { "acc": 0.6446922923720374, "acc_stderr": 0.004776283203468098, "acc_norm": 0.8447520414260108, "acc_norm_stderr": 0.003614007841341989 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.04033565667848319, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.04033565667848319 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5622641509433962, "acc_stderr": 0.030533338430467516, "acc_norm": 0.5622641509433962, "acc_norm_stderr": 0.030533338430467516 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5625, "acc_stderr": 0.04148415739394154, "acc_norm": 0.5625, "acc_norm_stderr": 0.04148415739394154 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5144508670520231, "acc_stderr": 0.03810871630454764, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383889, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383889 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4595744680851064, "acc_stderr": 0.03257901482099835, "acc_norm": 0.4595744680851064, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3684210526315789, "acc_stderr": 
0.04537815354939391, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.04537815354939391 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4413793103448276, "acc_stderr": 0.04137931034482758, "acc_norm": 0.4413793103448276, "acc_norm_stderr": 0.04137931034482758 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.31216931216931215, "acc_stderr": 0.0238652068369726, "acc_norm": 0.31216931216931215, "acc_norm_stderr": 0.0238652068369726 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.29365079365079366, "acc_stderr": 0.04073524322147126, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147126 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6290322580645161, "acc_stderr": 0.027480541887953593, "acc_norm": 0.6290322580645161, "acc_norm_stderr": 0.027480541887953593 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.03413963805906235, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.03413963805906235 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7212121212121212, "acc_stderr": 0.035014387062967806, "acc_norm": 0.7212121212121212, "acc_norm_stderr": 0.035014387062967806 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.03191178226713547, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.03191178226713547 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7150259067357513, "acc_stderr": 0.032577140777096614, "acc_norm": 0.7150259067357513, "acc_norm_stderr": 0.032577140777096614 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.49230769230769234, "acc_stderr": 0.025348006031534778, "acc_norm": 0.49230769230769234, "acc_norm_stderr": 0.025348006031534778 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02730914058823019, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02730914058823019 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03242225027115006, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03242225027115006 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658752, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658752 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7302752293577982, "acc_stderr": 0.019028486711115438, "acc_norm": 0.7302752293577982, "acc_norm_stderr": 0.019028486711115438 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7794117647058824, "acc_stderr": 0.02910225438967409, "acc_norm": 0.7794117647058824, "acc_norm_stderr": 0.02910225438967409 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.02747974455080851, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.02747974455080851 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5874439461883408, "acc_stderr": 0.03304062175449297, "acc_norm": 
0.5874439461883408, "acc_norm_stderr": 0.03304062175449297 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6412213740458015, "acc_stderr": 0.04206739313864908, "acc_norm": 0.6412213740458015, "acc_norm_stderr": 0.04206739313864908 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6111111111111112, "acc_stderr": 0.04712821257426769, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.04712821257426769 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6380368098159509, "acc_stderr": 0.037757007291414416, "acc_norm": 0.6380368098159509, "acc_norm_stderr": 0.037757007291414416 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028546, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028546 }, "harness|hendrycksTest-management|5": { "acc": 0.6310679611650486, "acc_stderr": 0.0477761518115674, "acc_norm": 0.6310679611650486, "acc_norm_stderr": 0.0477761518115674 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7863247863247863, "acc_stderr": 0.026853450377009154, "acc_norm": 0.7863247863247863, "acc_norm_stderr": 0.026853450377009154 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6756066411238825, "acc_stderr": 0.0167409290471627, "acc_norm": 0.6756066411238825, "acc_norm_stderr": 0.0167409290471627 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5491329479768786, "acc_stderr": 0.026788811931562757, "acc_norm": 0.5491329479768786, "acc_norm_stderr": 0.026788811931562757 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2569832402234637, "acc_stderr": 0.01461446582196632, "acc_norm": 0.2569832402234637, "acc_norm_stderr": 0.01461446582196632 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5718954248366013, "acc_stderr": 0.028332397483664278, "acc_norm": 0.5718954248366013, "acc_norm_stderr": 0.028332397483664278 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6012861736334405, "acc_stderr": 0.027809322585774496, "acc_norm": 0.6012861736334405, "acc_norm_stderr": 0.027809322585774496 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6172839506172839, "acc_stderr": 0.027044538138402595, "acc_norm": 0.6172839506172839, "acc_norm_stderr": 0.027044538138402595 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.42907801418439717, "acc_stderr": 0.029525914302558555, "acc_norm": 0.42907801418439717, "acc_norm_stderr": 0.029525914302558555 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.424380704041721, "acc_stderr": 0.01262334375743002, "acc_norm": 0.424380704041721, "acc_norm_stderr": 0.01262334375743002 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5808823529411765, "acc_stderr": 0.02997280717046462, "acc_norm": 0.5808823529411765, "acc_norm_stderr": 0.02997280717046462 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5212418300653595, "acc_stderr": 0.020209572388600248, "acc_norm": 0.5212418300653595, "acc_norm_stderr": 0.020209572388600248 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.5918367346938775, "acc_stderr": 0.03146465712827424, "acc_norm": 0.5918367346938775, "acc_norm_stderr": 0.03146465712827424 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6915422885572139, "acc_stderr": 0.032658195885126966, "acc_norm": 0.6915422885572139, "acc_norm_stderr": 0.032658195885126966 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.041633319989322626, "acc_norm": 0.78, "acc_norm_stderr": 0.041633319989322626 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.038786267710023595, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.038786267710023595 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7251461988304093, "acc_stderr": 0.03424042924691584, "acc_norm": 0.7251461988304093, "acc_norm_stderr": 0.03424042924691584 }, "harness|truthfulqa:mc|0": { "mc1": 0.34761321909424725, "mc1_stderr": 0.016670769188897306, "mc2": 0.5121992740888713, "mc2_stderr": 0.014650490351006002 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
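The loading example earlier in this card pulls the per-example details for a single task; the aggregated scores shown under "Latest results" are stored separately in the `results` configuration. A minimal sketch of reading them (assuming the `datasets` library is installed; the `results` configuration and its `latest` split are the ones declared in this card's config list):

```python
# Sketch: load the aggregated scores (the "results" configuration, "latest" split).
# Assumes the `datasets` library is installed.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_timdettmers__guanaco-33b-merged",
    "results",
    split="latest",
)
print(results)

# Converting to pandas can make the stored metrics easier to browse
# (the exact column layout is not specified in this card).
df = results.to_pandas()
print(df.head())
```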
[ -0.7246806621551514, -0.8223486542701721, 0.27890509366989136, 0.20532967150211334, -0.17411792278289795, -0.04318337142467499, -0.0065202112309634686, -0.22350038588047028, 0.5600948333740234, -0.06191413104534149, -0.4891355633735657, -0.6892867684364319, -0.42353683710098267, 0.22110615670681, -0.03653932735323906, 0.8400419354438782, -0.19666439294815063, -0.15201158821582794, 0.08298648148775101, -0.027408471331000328, -0.24070604145526886, -0.33789438009262085, -0.5135191679000854, -0.3599899411201477, 0.21348822116851807, 0.42662355303764343, 0.43593475222587585, 0.8087628483772278, 0.6842613220214844, 0.3009071350097656, -0.34153637290000916, 0.024705365300178528, -0.14739863574504852, -0.30480948090553284, 0.3672846257686615, -0.3187563717365265, -0.8563399910926819, 0.3188551366329193, 0.7446907162666321, 0.6092761754989624, -0.11741821467876434, 0.25185900926589966, 0.07494696229696274, 0.5441020727157593, -0.3574330806732178, 0.03592655062675476, -0.25831809639930725, 0.24566586315631866, -0.2025654911994934, -0.28139472007751465, -0.3038889765739441, -0.262988805770874, -0.15224884450435638, -0.8401721119880676, 0.2963765561580658, 0.3016282320022583, 1.5888952016830444, -0.17066490650177002, -0.24996480345726013, 0.11599193513393402, -0.13025613129138947, 1.0223709344863892, -0.8668965697288513, 0.3552430272102356, 0.7559807896614075, 0.1218257024884224, -0.1739780604839325, -0.5952032804489136, -0.6305203437805176, 0.09458041191101074, -0.3720848262310028, 0.3431988060474396, -0.09329932183027267, -0.14885194599628448, 0.34125661849975586, 0.6890420913696289, -0.6489688158035278, 0.1800118386745453, -0.6934031248092651, -0.15097874402999878, 1.0534422397613525, 0.35281869769096375, 0.07979916036128998, -0.3648691475391388, -0.6993057727813721, -0.6565030813217163, -0.3891400098800659, 0.23319196701049805, 0.4436206519603729, 0.3643754720687866, -0.4131527245044708, 0.6822609901428223, -0.406997412443161, 0.5693023204803467, 0.3851563632488251, 0.04085825756192207, 0.878763735294342, -0.6834699511528015, -0.5873560309410095, -0.08555030822753906, 1.1361606121063232, 0.5559754371643066, 0.08137919008731842, 0.20730316638946533, 0.016280747950077057, -0.13567180931568146, 0.046872302889823914, -0.8185489773750305, -0.2811187505722046, 0.18312494456768036, -0.3665807545185089, -0.5183278918266296, 0.37010157108306885, -0.8763601183891296, 0.1665613353252411, -0.02223513461649418, 0.3912676274776459, -0.5012645721435547, -0.10415513813495636, 0.2489859014749527, -0.4036821722984314, 0.827002763748169, -0.14448034763336182, -0.7833886742591858, 0.37346506118774414, 0.5022020936012268, 0.7879719138145447, -0.09496861696243286, -0.4615017771720886, -0.11465921998023987, -0.09602656960487366, -0.2721688747406006, 0.5356854200363159, -0.21608968079090118, -0.41322755813598633, -0.2726413607597351, 0.3028818964958191, -0.2826245427131653, -0.3395208716392517, 0.7504428625106812, -0.221891850233078, 0.2504560053348541, -0.39800721406936646, -0.592335045337677, 0.13707014918327332, 0.3559863567352295, -0.4025486409664154, 1.2897517681121826, 0.2740653455257416, -0.8002386689186096, 0.394849956035614, -0.5805712938308716, -0.18783330917358398, -0.015431872569024563, -0.016495326533913612, -0.7821062207221985, -0.25835609436035156, 0.18361447751522064, 0.4114941954612732, -0.18532417714595795, -0.11145224422216415, -0.36615148186683655, -0.3468744158744812, 0.35621199011802673, -0.16498634219169617, 1.2438167333602905, 0.0003309536259621382, -0.739486813545227, -0.09321901947259903, 
-1.1777321100234985, 0.3419210910797119, 0.17350268363952637, -0.35187652707099915, -0.1848505139350891, -0.5151629447937012, 0.004702606704086065, 0.19261300563812256, 0.2738928496837616, -0.7748123407363892, 0.281786173582077, -0.3123708665370941, 0.1954973191022873, 1.2490689754486084, 0.018756825476884842, 0.14695683121681213, -0.5724721550941467, 0.530360758304596, 0.20743346214294434, 0.21756280958652496, 0.4053916037082672, -0.5838577151298523, -0.8076131343841553, -0.536669135093689, -0.03794439136981964, 0.6003331542015076, -0.14575356245040894, 1.165061116218567, 0.06405732780694962, -0.8962233066558838, -0.4423382580280304, -0.12731756269931793, 0.48297637701034546, 0.7735359072685242, 0.6171244978904724, -0.051764242351055145, -0.6255472302436829, -1.0935496091842651, -0.26411890983581543, -0.18248605728149414, 0.11859230697154999, 0.18495653569698334, 1.0116819143295288, -0.2599545121192932, 0.5831456184387207, -1.048506259918213, -0.23019981384277344, 0.18744705617427826, -0.033369097858667374, 0.7739683389663696, 0.761348307132721, 0.5773844718933105, -0.6347857713699341, -0.5288059711456299, 0.24423818290233612, -0.910487949848175, -0.10144408047199249, 0.13642488420009613, -0.2905068099498749, 0.10949955880641937, 0.15093190968036652, -0.7128950357437134, 0.5179799795150757, 0.2112162709236145, -1.0437276363372803, 1.0224065780639648, -0.35911619663238525, 0.6341056227684021, -1.013135552406311, 0.1521461308002472, -0.046824775636196136, 0.02604912593960762, -0.49017763137817383, 0.05661178007721901, 0.1109359934926033, 0.45225638151168823, -0.5169998407363892, 0.781461775302887, -0.689164400100708, -0.02836833894252777, 0.4445391297340393, 0.12039873749017715, -0.11922213435173035, 0.3439730703830719, -0.27352476119995117, 0.811036229133606, 0.7288730144500732, -0.45238837599754333, 0.5343248248100281, 0.4039818048477173, -0.19055728614330292, 0.7395071387290955, -0.4781383275985718, -0.29061397910118103, 0.30069246888160706, -0.07910674065351486, -0.8421378135681152, -0.4944218397140503, 0.02397618256509304, -0.590396523475647, -0.11385539919137955, 0.33960166573524475, -0.2692567706108093, -0.8208674192428589, -0.9339308738708496, 0.33651211857795715, 0.7196320295333862, -0.442836195230484, -0.16285818815231323, 0.037798184901475906, 0.09389948844909668, -0.827340304851532, -0.8756850361824036, -0.47268688678741455, -0.2475445717573166, -0.7435283660888672, 0.33621665835380554, -0.24139612913131714, -0.2828799784183502, -0.06295057386159897, -0.20805327594280243, -0.3608463406562805, -0.050034623593091965, 0.139097660779953, 0.6692584753036499, -0.4061061143875122, -0.30606046319007874, -0.21690884232521057, -0.13757766783237457, 0.21888798475265503, -0.11913219839334488, 0.37672823667526245, -0.4796377122402191, -0.3675883412361145, -0.4247157573699951, -0.031999994069337845, 0.7536529302597046, -0.10096856206655502, 0.7500349283218384, 0.4230966866016388, -0.31321650743484497, -0.01165684498846531, -0.3164495825767517, -0.23311324417591095, -0.5806885361671448, 0.23476949334144592, -0.4737263023853302, -1.0470783710479736, 0.8283530473709106, 0.5473621487617493, 0.004206660203635693, 1.1348665952682495, 0.5850153565406799, -0.3258342444896698, 0.972883403301239, 0.04372797906398773, 0.34741199016571045, 0.3928285837173462, -0.6837847828865051, 0.10453850030899048, -0.9191038608551025, -0.3440609574317932, -0.5994303226470947, -0.4529320001602173, -0.7061846256256104, -0.08748122304677963, 0.23908954858779907, 0.14992117881774902, -0.6621053218841553, 
0.607424259185791, -0.8336403369903564, 0.5930587649345398, 0.5479413270950317, 0.2653900980949402, 0.17878803610801697, -0.14672817289829254, -0.3943072259426117, -0.12463946640491486, -0.4677741229534149, -0.2225653976202011, 1.20880925655365, 0.2614765763282776, 0.7316277623176575, 0.07952910661697388, 0.8961643576622009, 0.12058144062757492, -0.08479178696870804, -0.5887437462806702, 0.6363880634307861, 0.11430308222770691, -0.7630264163017273, -0.3932983875274658, -0.5088527798652649, -1.08126962184906, 0.39194440841674805, -0.12611357867717743, -0.8529650568962097, 0.13961189985275269, -0.0008961900020949543, -0.18918897211551666, 0.5022187829017639, -0.5640304088592529, 0.8296616077423096, -0.1273515522480011, -0.4893907904624939, 0.07712205499410629, -0.7816348075866699, 0.4859354496002197, 0.18375279009342194, 0.23410415649414062, 0.07407660782337189, 0.2848551273345947, 1.176620364189148, -0.8523824214935303, 0.3979206681251526, 0.12265240401029587, 0.033182162791490555, 0.331810861825943, -0.19979706406593323, 0.4862396717071533, 0.07630373537540436, 0.016887662932276726, -0.10010960698127747, 0.2603262662887573, -0.8495656251907349, -0.0825270339846611, 0.9563146829605103, -0.9390245079994202, -0.579459547996521, -0.9302353262901306, -0.5424229502677917, 0.10044120997190475, 0.5517706274986267, 0.36921313405036926, 0.5250287652015686, 0.007133293431252241, 0.45710495114326477, 0.853696346282959, -0.10252958536148071, 0.6075250506401062, 0.2253890484571457, 0.12389674037694931, -0.7242302894592285, 0.7784736156463623, 0.07426516711711884, 0.33076977729797363, 0.26240774989128113, 0.3621307611465454, -0.5442330241203308, -0.21782521903514862, -0.16869595646858215, 0.5247802138328552, -0.6236383318901062, -0.28349459171295166, -0.3415261507034302, -0.4089553654193878, -0.7441397905349731, -0.6507796049118042, -0.30539727210998535, -0.4944223463535309, -0.46324679255485535, -0.4871607720851898, 0.580908477306366, 0.467829167842865, -0.40705838799476624, 0.09487979114055634, -0.5046733617782593, 0.20980747044086456, 0.3312087655067444, 0.5483978390693665, -0.391470730304718, -0.5692647099494934, 0.021219860762357712, -0.14990009367465973, -0.5545202493667603, -0.9639232754707336, 0.35224926471710205, -0.1031179204583168, 0.5038141012191772, 0.5836747288703918, 0.06737087666988373, 0.8715575337409973, -0.17846788465976715, 1.0109390020370483, 0.33543452620506287, -0.8004802465438843, 0.7454296350479126, -0.3569524586200714, 0.17192815244197845, 0.6392353773117065, 0.16987694799900055, -0.17941972613334656, -0.6920328736305237, -1.3229560852050781, -0.7963182926177979, 0.6644906401634216, 0.4087066054344177, -0.2878367304801941, 0.03993390500545502, 0.12774133682250977, -0.328948438167572, -0.18040965497493744, -0.6650319695472717, -0.8682011365890503, -0.1273900866508484, -0.509530782699585, 0.11227858066558838, 0.04566831514239311, -0.4386173486709595, -0.8334105610847473, 0.8900008797645569, 0.02023526281118393, 0.5901729464530945, 0.4565322995185852, 0.05810622125864029, 0.06971447169780731, 0.5021777749061584, 0.9381048679351807, 0.7375982999801636, -0.49206987023353577, 0.42965349555015564, 0.39197033643722534, -1.0495110750198364, 0.4931996464729309, 0.34750184416770935, -0.06521672010421753, -0.029142988845705986, 0.4580961763858795, 0.4183083474636078, 0.03728526085615158, -0.171072855591774, 0.5961018800735474, -0.02911238744854927, -0.5661133527755737, -0.3731707036495209, 0.08903367072343826, -0.14168882369995117, 0.013718866743147373, 0.42527109384536743, 
-0.1414456069469452, -0.07920321077108383, -0.4891517758369446, 0.4872989058494568, 0.3752303123474121, -0.48584887385368347, -0.14612208306789398, 0.7424077391624451, -0.17600169777870178, -0.13966748118400574, 0.3619087338447571, -0.16739025712013245, -0.6061480045318604, 1.1215211153030396, 0.587585985660553, 0.7373471260070801, -0.2688010334968567, -0.05537256598472595, 0.9050560593605042, 0.41402873396873474, -0.06331530958414078, 0.48886483907699585, 0.314461886882782, -0.24729382991790771, 0.15370933711528778, -0.81623774766922, -0.04435216635465622, 0.17524145543575287, -0.8525238633155823, 0.35960257053375244, -0.5237365961074829, -0.20521436631679535, 0.0005114242085255682, 0.4098077416419983, -0.45389029383659363, 0.5738310217857361, -0.44857221841812134, 1.2013111114501953, -1.0038164854049683, 0.7378648519515991, 0.7594341039657593, -0.5702825784683228, -1.0130404233932495, -0.5166359543800354, -0.011236600577831268, -0.7561150193214417, 0.5722790956497192, -0.08048997074365616, 0.17088815569877625, -0.08796623349189758, -0.6874650716781616, -0.8707706332206726, 1.4154783487319946, -0.03206639364361763, -0.40693414211273193, 0.20680974423885345, -0.056557547301054, 0.4577966034412384, 0.13530795276165009, 0.6214250326156616, 0.7403045296669006, 0.8135098814964294, -0.08203256875276566, -0.7559345960617065, 0.3438149094581604, -0.48272642493247986, -0.3145798146724701, 0.4848746657371521, -0.912354052066803, 1.2317794561386108, 0.018265509977936745, 0.2080923169851303, -0.1848173886537552, 0.6785665154457092, 0.7617709636688232, 0.26287272572517395, 0.3291025161743164, 0.9086272716522217, 0.8714450001716614, -0.5460485219955444, 0.9917020797729492, -0.1856868863105774, 0.9059821963310242, 0.6707544922828674, 0.25081899762153625, 0.748762845993042, 0.6888858675956726, -0.5548291802406311, 0.5616168975830078, 0.7821024060249329, -0.3110436201095581, 0.38208112120628357, 0.23635338246822357, -0.1572110801935196, -0.1230766773223877, 0.4272148907184601, -0.895255982875824, 0.10829518735408783, 0.047252360731363297, -0.3601002097129822, 0.06624863296747208, -0.4409239590167999, 0.31003740429878235, -0.05649774521589279, -0.008347252383828163, 0.32967883348464966, 0.028675857931375504, -0.4250244200229645, 0.9512813091278076, -0.14057928323745728, 0.7685869336128235, -0.552902340888977, -0.058130428194999695, -0.40771350264549255, 0.6131221652030945, -0.4531666040420532, -1.0826627016067505, 0.1906937211751938, 0.06190004572272301, -0.13256655633449554, -0.18312954902648926, 0.6851718425750732, -0.21643930673599243, -0.7276862263679504, 0.15335644781589508, 0.056709107011556625, 0.06258730590343475, 0.5200204253196716, -0.6339073181152344, -0.3124542236328125, -0.03809772804379463, -0.4897756576538086, 0.12523923814296722, 0.2848798334598541, 0.2631303668022156, 0.5396866202354431, 0.6208466291427612, 0.17356668412685394, 0.3906248211860657, -0.5764397382736206, 0.7864195108413696, -1.0722602605819702, -0.7305813431739807, -0.902040958404541, 0.45803117752075195, -0.33369067311286926, -0.8708343505859375, 0.9890931248664856, 1.0495893955230713, 0.9031723141670227, -0.031329721212387085, 0.6387126445770264, -0.39425140619277954, 0.20966891944408417, -0.40766915678977966, 0.9585707187652588, -0.8389177322387695, -0.2157723307609558, -0.25176653265953064, -0.7133771181106567, -0.38233715295791626, 0.8222424387931824, -0.1584189534187317, 0.03910233452916145, 1.100919485092163, 0.6603249311447144, -0.14669476449489594, 0.01783747784793377, -0.06843070685863495, 0.5396406054496765, 
0.3779228925704956, 1.000102162361145, 0.6406649947166443, -0.8009102940559387, 0.3508685231208801, -0.5013043880462646, -0.4065057635307312, -0.38717344403266907, -0.4581979215145111, -0.8613520264625549, -0.5094403028488159, -0.24669116735458374, -0.6341797113418579, -0.09115613996982574, 0.9874270558357239, 0.4518245458602905, -0.903128981590271, -0.4457923471927643, -0.124150849878788, 0.16925421357154846, -0.5933787822723389, -0.42393937706947327, 0.6967985033988953, -0.11245257407426834, -0.5864890217781067, 0.19352295994758606, -0.13250702619552612, 0.2449546903371811, 0.12239750474691391, -0.4126054048538208, -0.7317831516265869, 0.01536653097718954, 0.4200303256511688, 0.37218379974365234, -0.6880760788917542, -0.705406665802002, 0.2919877767562866, -0.5567542910575867, 0.44906800985336304, 0.01020131353288889, -0.5240800976753235, 0.028606774285435677, 0.7257099151611328, 0.4579930603504181, 0.6828450560569763, -0.013357764109969139, 0.1246272549033165, -0.6871320009231567, 0.2039816677570343, -0.025356587022542953, 0.273896187543869, -0.033973272889852524, -0.2957819998264313, 0.8007177114486694, 0.6757179498672485, -0.5205784440040588, -1.094515323638916, -0.45419490337371826, -1.458583950996399, 0.005162165034562349, 1.0931147336959839, 0.02445075660943985, -0.4829372465610504, 0.27586790919303894, -0.11888334155082703, 0.22328363358974457, -0.3105337619781494, 0.7274612188339233, 0.8019329905509949, -0.3608976900577545, 0.09534107893705368, -0.6283514499664307, 0.3582462966442108, 0.516953706741333, -1.1949042081832886, -0.10320731997489929, 0.24032710492610931, 0.30482685565948486, 0.3464432954788208, 0.608294665813446, -0.12438062578439713, 0.2643929719924927, 0.21328388154506683, 0.06309309601783752, -0.03607510030269623, 0.04970301687717438, -0.2632676959037781, 0.07245676964521408, -0.2636623680591583, -0.4679996371269226 ]
open-llm-leaderboard/details_klosax__open_llama_7b_400bt_preview
open-llm-leaderboard
2023-08-27T12:34:31Z
201
0
[ "region:us" ]
null
2023-08-18T11:30:56Z
--- pretty_name: Evaluation run of klosax/open_llama_7b_400bt_preview dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [klosax/open_llama_7b_400bt_preview](https://huggingface.co/klosax/open_llama_7b_400bt_preview)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_klosax__open_llama_7b_400bt_preview\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-24T11:21:09.797599](https://huggingface.co/datasets/open-llm-leaderboard/details_klosax__open_llama_7b_400bt_preview/blob/main/results_2023-07-24T11%3A21%3A09.797599.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2815767296994265,\n\ \ \"acc_stderr\": 0.03231700614609591,\n \"acc_norm\": 0.2849357778261771,\n\ \ \"acc_norm_stderr\": 0.03231676634268911,\n \"mc1\": 0.22888616891064872,\n\ \ \"mc1_stderr\": 0.014706994909055027,\n \"mc2\": 0.360402821143995,\n\ \ \"mc2_stderr\": 0.013409179932482647\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.36177474402730375,\n \"acc_stderr\": 0.014041957945038064,\n\ \ \"acc_norm\": 0.39505119453924914,\n \"acc_norm_stderr\": 0.014285898292938169\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4939255128460466,\n\ \ \"acc_stderr\": 0.004989413158034797,\n \"acc_norm\": 0.658832901812388,\n\ \ \"acc_norm_stderr\": 0.004731324409133264\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3111111111111111,\n\ \ \"acc_stderr\": 0.03999262876617722,\n \"acc_norm\": 0.3111111111111111,\n\ \ \"acc_norm_stderr\": 0.03999262876617722\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.28289473684210525,\n \"acc_stderr\": 0.03665349695640767,\n\ \ \"acc_norm\": 0.28289473684210525,\n \"acc_norm_stderr\": 0.03665349695640767\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n\ \ \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \ \ \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.2981132075471698,\n \"acc_stderr\": 0.028152837942493857,\n\ \ \"acc_norm\": 0.2981132075471698,\n \"acc_norm_stderr\": 0.028152837942493857\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n\ \ \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n\ \ \"acc_norm_stderr\": 
0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n\ \ \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2543352601156069,\n\ \ \"acc_stderr\": 0.0332055644308557,\n \"acc_norm\": 0.2543352601156069,\n\ \ \"acc_norm_stderr\": 0.0332055644308557\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237655,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237655\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n\ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.19574468085106383,\n \"acc_stderr\": 0.025937853139977148,\n\ \ \"acc_norm\": 0.19574468085106383,\n \"acc_norm_stderr\": 0.025937853139977148\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.296551724137931,\n \"acc_stderr\": 0.03806142687309993,\n\ \ \"acc_norm\": 0.296551724137931,\n \"acc_norm_stderr\": 0.03806142687309993\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2671957671957672,\n \"acc_stderr\": 0.02278967314577656,\n \"\ acc_norm\": 0.2671957671957672,\n \"acc_norm_stderr\": 0.02278967314577656\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.16666666666666666,\n\ \ \"acc_stderr\": 0.03333333333333337,\n \"acc_norm\": 0.16666666666666666,\n\ \ \"acc_norm_stderr\": 0.03333333333333337\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25161290322580643,\n\ \ \"acc_stderr\": 0.024685979286239956,\n \"acc_norm\": 0.25161290322580643,\n\ \ \"acc_norm_stderr\": 0.024685979286239956\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.28078817733990147,\n \"acc_stderr\": 0.03161856335358609,\n\ \ \"acc_norm\": 0.28078817733990147,\n \"acc_norm_stderr\": 0.03161856335358609\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\ : 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.03453131801885415,\n\ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.03453131801885415\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.3434343434343434,\n \"acc_stderr\": 0.033832012232444426,\n \"\ acc_norm\": 0.3434343434343434,\n \"acc_norm_stderr\": 0.033832012232444426\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.03480175668466036,\n\ \ \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.358974358974359,\n \"acc_stderr\": 0.024321738484602357,\n \ \ \"acc_norm\": 0.358974358974359,\n \"acc_norm_stderr\": 0.024321738484602357\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26296296296296295,\n \"acc_stderr\": 0.02684205787383371,\n \ \ \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.02684205787383371\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n\ \ \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2582781456953642,\n \"acc_stderr\": 0.035737053147634576,\n \"\ acc_norm\": 0.2582781456953642,\n \"acc_norm_stderr\": 0.035737053147634576\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.3486238532110092,\n \"acc_stderr\": 0.020431254090714328,\n \"\ acc_norm\": 0.3486238532110092,\n \"acc_norm_stderr\": 0.020431254090714328\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4398148148148148,\n \"acc_stderr\": 0.033851779760448106,\n \"\ acc_norm\": 0.4398148148148148,\n \"acc_norm_stderr\": 0.033851779760448106\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.27941176470588236,\n \"acc_stderr\": 0.031493281045079556,\n \"\ acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.031493281045079556\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.25316455696202533,\n \"acc_stderr\": 0.028304657943035303,\n \ \ \"acc_norm\": 0.25316455696202533,\n \"acc_norm_stderr\": 0.028304657943035303\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.13901345291479822,\n\ \ \"acc_stderr\": 0.023219352834474464,\n \"acc_norm\": 0.13901345291479822,\n\ \ \"acc_norm_stderr\": 0.023219352834474464\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2824427480916031,\n \"acc_stderr\": 0.03948406125768361,\n\ \ \"acc_norm\": 0.2824427480916031,\n \"acc_norm_stderr\": 0.03948406125768361\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.38016528925619836,\n \"acc_stderr\": 0.04431324501968432,\n \"\ acc_norm\": 0.38016528925619836,\n \"acc_norm_stderr\": 0.04431324501968432\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.21296296296296297,\n\ \ \"acc_stderr\": 0.039578354719809805,\n \"acc_norm\": 0.21296296296296297,\n\ \ \"acc_norm_stderr\": 0.039578354719809805\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.3006134969325153,\n \"acc_stderr\": 0.03602511318806771,\n\ \ \"acc_norm\": 0.3006134969325153,\n \"acc_norm_stderr\": 0.03602511318806771\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.16071428571428573,\n\ \ \"acc_stderr\": 0.03485946096475741,\n \"acc_norm\": 0.16071428571428573,\n\ \ \"acc_norm_stderr\": 0.03485946096475741\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.3786407766990291,\n \"acc_stderr\": 0.04802694698258972,\n\ \ \"acc_norm\": 0.3786407766990291,\n \"acc_norm_stderr\": 0.04802694698258972\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 
0.2564102564102564,\n\ \ \"acc_stderr\": 0.02860595370200425,\n \"acc_norm\": 0.2564102564102564,\n\ \ \"acc_norm_stderr\": 0.02860595370200425\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.040201512610368445,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.040201512610368445\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.20434227330779056,\n\ \ \"acc_stderr\": 0.0144191239809319,\n \"acc_norm\": 0.20434227330779056,\n\ \ \"acc_norm_stderr\": 0.0144191239809319\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.29190751445086704,\n \"acc_stderr\": 0.02447699407624734,\n\ \ \"acc_norm\": 0.29190751445086704,\n \"acc_norm_stderr\": 0.02447699407624734\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n\ \ \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n\ \ \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.02495418432487991,\n\ \ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.02495418432487991\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2797427652733119,\n\ \ \"acc_stderr\": 0.02549425935069489,\n \"acc_norm\": 0.2797427652733119,\n\ \ \"acc_norm_stderr\": 0.02549425935069489\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.22530864197530864,\n \"acc_stderr\": 0.023246202647819746,\n\ \ \"acc_norm\": 0.22530864197530864,\n \"acc_norm_stderr\": 0.023246202647819746\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2695035460992908,\n \"acc_stderr\": 0.026469036818590638,\n \ \ \"acc_norm\": 0.2695035460992908,\n \"acc_norm_stderr\": 0.026469036818590638\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.26597131681877445,\n\ \ \"acc_stderr\": 0.011285033165551269,\n \"acc_norm\": 0.26597131681877445,\n\ \ \"acc_norm_stderr\": 0.011285033165551269\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n\ \ \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.22549019607843138,\n \"acc_stderr\": 0.016906615927288145,\n \ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.016906615927288145\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.20909090909090908,\n\ \ \"acc_stderr\": 0.038950910157241364,\n \"acc_norm\": 0.20909090909090908,\n\ \ \"acc_norm_stderr\": 0.038950910157241364\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.031362502409358936,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.031362502409358936\n \ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24875621890547264,\n\ \ \"acc_stderr\": 0.030567675938916707,\n \"acc_norm\": 0.24875621890547264,\n\ \ \"acc_norm_stderr\": 0.030567675938916707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.1927710843373494,\n\ \ \"acc_stderr\": 0.030709824050565274,\n \"acc_norm\": 0.1927710843373494,\n\ \ \"acc_norm_stderr\": 0.030709824050565274\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.29239766081871343,\n 
\"acc_stderr\": 0.034886477134579215,\n\ \ \"acc_norm\": 0.29239766081871343,\n \"acc_norm_stderr\": 0.034886477134579215\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22888616891064872,\n\ \ \"mc1_stderr\": 0.014706994909055027,\n \"mc2\": 0.360402821143995,\n\ \ \"mc2_stderr\": 0.013409179932482647\n }\n}\n```" repo_url: https://huggingface.co/klosax/open_llama_7b_400bt_preview leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|arc:challenge|25_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hellaswag|10_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:21:09.797599.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:21:09.797599.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:21:09.797599.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:21:09.797599.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:21:09.797599.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:21:09.797599.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T11_21_09.797599 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:21:09.797599.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:21:09.797599.parquet' - config_name: results data_files: - split: 2023_07_24T11_21_09.797599 path: - results_2023-07-24T11:21:09.797599.parquet - split: latest path: - results_2023-07-24T11:21:09.797599.parquet --- # Dataset Card for Evaluation run of klosax/open_llama_7b_400bt_preview ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/klosax/open_llama_7b_400bt_preview - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [klosax/open_llama_7b_400bt_preview](https://huggingface.co/klosax/open_llama_7b_400bt_preview) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
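For the aggregated metrics themselves, you can for instance load the "results" configuration directly. The sketch below only assumes the "latest" split declared in the configs above; the exact column layout of the aggregated-results parquet may vary between harness versions.

```python
from datasets import load_dataset

# Minimal sketch: load the aggregated metrics of this run through the
# "results" configuration and its "latest" split (both listed in the configs above).
results = load_dataset(
    "open-llm-leaderboard/details_klosax__open_llama_7b_400bt_preview",
    "results",
    split="latest",
)

# Inspect what the aggregated-results parquet contains; the column layout is
# not guaranteed, so this only prints whatever fields are present.
print(results.column_names)
print(results[0])
```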
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_klosax__open_llama_7b_400bt_preview", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-24T11:21:09.797599](https://huggingface.co/datasets/open-llm-leaderboard/details_klosax__open_llama_7b_400bt_preview/blob/main/results_2023-07-24T11%3A21%3A09.797599.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2815767296994265, "acc_stderr": 0.03231700614609591, "acc_norm": 0.2849357778261771, "acc_norm_stderr": 0.03231676634268911, "mc1": 0.22888616891064872, "mc1_stderr": 0.014706994909055027, "mc2": 0.360402821143995, "mc2_stderr": 0.013409179932482647 }, "harness|arc:challenge|25": { "acc": 0.36177474402730375, "acc_stderr": 0.014041957945038064, "acc_norm": 0.39505119453924914, "acc_norm_stderr": 0.014285898292938169 }, "harness|hellaswag|10": { "acc": 0.4939255128460466, "acc_stderr": 0.004989413158034797, "acc_norm": 0.658832901812388, "acc_norm_stderr": 0.004731324409133264 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3111111111111111, "acc_stderr": 0.03999262876617722, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.03999262876617722 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.28289473684210525, "acc_stderr": 0.03665349695640767, "acc_norm": 0.28289473684210525, "acc_norm_stderr": 0.03665349695640767 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2981132075471698, "acc_stderr": 0.028152837942493857, "acc_norm": 0.2981132075471698, "acc_norm_stderr": 0.028152837942493857 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2543352601156069, "acc_stderr": 0.0332055644308557, "acc_norm": 0.2543352601156069, "acc_norm_stderr": 0.0332055644308557 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.19574468085106383, "acc_stderr": 0.025937853139977148, "acc_norm": 0.19574468085106383, "acc_norm_stderr": 0.025937853139977148 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.296551724137931, "acc_stderr": 0.03806142687309993, "acc_norm": 0.296551724137931, "acc_norm_stderr": 0.03806142687309993 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.02278967314577656, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.02278967314577656 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333337, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333337 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.024685979286239956, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.024685979286239956 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.28078817733990147, "acc_stderr": 0.03161856335358609, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.03161856335358609 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885415, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885415 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3434343434343434, "acc_stderr": 0.033832012232444426, "acc_norm": 0.3434343434343434, "acc_norm_stderr": 0.033832012232444426 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.358974358974359, "acc_stderr": 0.024321738484602357, "acc_norm": 0.358974358974359, "acc_norm_stderr": 0.024321738484602357 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3486238532110092, "acc_stderr": 0.020431254090714328, "acc_norm": 0.3486238532110092, "acc_norm_stderr": 0.020431254090714328 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4398148148148148, "acc_stderr": 0.033851779760448106, "acc_norm": 0.4398148148148148, "acc_norm_stderr": 0.033851779760448106 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27941176470588236, "acc_stderr": 0.031493281045079556, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.031493281045079556 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.25316455696202533, "acc_stderr": 0.028304657943035303, "acc_norm": 0.25316455696202533, "acc_norm_stderr": 0.028304657943035303 }, "harness|hendrycksTest-human_aging|5": { "acc": 
0.13901345291479822, "acc_stderr": 0.023219352834474464, "acc_norm": 0.13901345291479822, "acc_norm_stderr": 0.023219352834474464 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.38016528925619836, "acc_stderr": 0.04431324501968432, "acc_norm": 0.38016528925619836, "acc_norm_stderr": 0.04431324501968432 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.21296296296296297, "acc_stderr": 0.039578354719809805, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.039578354719809805 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3006134969325153, "acc_stderr": 0.03602511318806771, "acc_norm": 0.3006134969325153, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.16071428571428573, "acc_stderr": 0.03485946096475741, "acc_norm": 0.16071428571428573, "acc_norm_stderr": 0.03485946096475741 }, "harness|hendrycksTest-management|5": { "acc": 0.3786407766990291, "acc_stderr": 0.04802694698258972, "acc_norm": 0.3786407766990291, "acc_norm_stderr": 0.04802694698258972 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2564102564102564, "acc_stderr": 0.02860595370200425, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.02860595370200425 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.20434227330779056, "acc_stderr": 0.0144191239809319, "acc_norm": 0.20434227330779056, "acc_norm_stderr": 0.0144191239809319 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.29190751445086704, "acc_stderr": 0.02447699407624734, "acc_norm": 0.29190751445086704, "acc_norm_stderr": 0.02447699407624734 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808835, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808835 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2549019607843137, "acc_stderr": 0.02495418432487991, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.02495418432487991 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2797427652733119, "acc_stderr": 0.02549425935069489, "acc_norm": 0.2797427652733119, "acc_norm_stderr": 0.02549425935069489 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.22530864197530864, "acc_stderr": 0.023246202647819746, "acc_norm": 0.22530864197530864, "acc_norm_stderr": 0.023246202647819746 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590638, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590638 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.26597131681877445, "acc_stderr": 0.011285033165551269, "acc_norm": 0.26597131681877445, "acc_norm_stderr": 0.011285033165551269 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.22549019607843138, "acc_stderr": 0.016906615927288145, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.016906615927288145 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 0.038950910157241364, "acc_norm": 0.20909090909090908, "acc_norm_stderr": 
0.038950910157241364 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4, "acc_stderr": 0.031362502409358936, "acc_norm": 0.4, "acc_norm_stderr": 0.031362502409358936 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24875621890547264, "acc_stderr": 0.030567675938916707, "acc_norm": 0.24875621890547264, "acc_norm_stderr": 0.030567675938916707 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.1927710843373494, "acc_stderr": 0.030709824050565274, "acc_norm": 0.1927710843373494, "acc_norm_stderr": 0.030709824050565274 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.29239766081871343, "acc_stderr": 0.034886477134579215, "acc_norm": 0.29239766081871343, "acc_norm_stderr": 0.034886477134579215 }, "harness|truthfulqa:mc|0": { "mc1": 0.22888616891064872, "mc1_stderr": 0.014706994909055027, "mc2": 0.360402821143995, "mc2_stderr": 0.013409179932482647 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
datasetId: open-llm-leaderboard/details_klosax__open_llama_3b_350bt_preview
author: open-llm-leaderboard
last_modified: 2023-08-27T12:34:36Z
downloads: 201
likes: 0
tags: [ "region:us" ]
task_categories: null
createdAt: 2023-08-18T11:31:24Z
--- pretty_name: Evaluation run of klosax/open_llama_3b_350bt_preview dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [klosax/open_llama_3b_350bt_preview](https://huggingface.co/klosax/open_llama_3b_350bt_preview)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_klosax__open_llama_3b_350bt_preview\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-24T10:25:13.548749](https://huggingface.co/datasets/open-llm-leaderboard/details_klosax__open_llama_3b_350bt_preview/blob/main/results_2023-07-24T10%3A25%3A13.548749.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.27221888756913304,\n\ \ \"acc_stderr\": 0.03212813724268037,\n \"acc_norm\": 0.2751900409341744,\n\ \ \"acc_norm_stderr\": 0.032129932454657235,\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.01450904517148729,\n \"mc2\": 0.35027279444600373,\n\ \ \"mc2_stderr\": 0.01335009503768823\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.34215017064846415,\n \"acc_stderr\": 0.01386415215917728,\n\ \ \"acc_norm\": 0.3651877133105802,\n \"acc_norm_stderr\": 0.014070265519268802\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4563831905994822,\n\ \ \"acc_stderr\": 0.004970759774676886,\n \"acc_norm\": 0.6086436964748058,\n\ \ \"acc_norm_stderr\": 0.004870563921220623\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2740740740740741,\n\ \ \"acc_stderr\": 0.03853254836552003,\n \"acc_norm\": 0.2740740740740741,\n\ \ \"acc_norm_stderr\": 0.03853254836552003\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.23026315789473684,\n \"acc_stderr\": 0.03426059424403165,\n\ \ \"acc_norm\": 0.23026315789473684,\n \"acc_norm_stderr\": 0.03426059424403165\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.24,\n\ \ \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \ \ \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.30566037735849055,\n \"acc_stderr\": 0.028353298073322666,\n\ \ \"acc_norm\": 0.30566037735849055,\n \"acc_norm_stderr\": 0.028353298073322666\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.22916666666666666,\n\ \ \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.22916666666666666,\n\ \ 
\"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \"acc_norm\": 0.22,\n\ \ \"acc_norm_stderr\": 0.0416333199893227\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n\ \ \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n\ \ \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.14705882352941177,\n \"acc_stderr\": 0.03524068951567447,\n\ \ \"acc_norm\": 0.14705882352941177,\n \"acc_norm_stderr\": 0.03524068951567447\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n\ \ \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.33191489361702126,\n \"acc_stderr\": 0.03078373675774565,\n\ \ \"acc_norm\": 0.33191489361702126,\n \"acc_norm_stderr\": 0.03078373675774565\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n\ \ \"acc_stderr\": 0.041857744240220554,\n \"acc_norm\": 0.2719298245614035,\n\ \ \"acc_norm_stderr\": 0.041857744240220554\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2620689655172414,\n \"acc_stderr\": 0.03664666337225256,\n\ \ \"acc_norm\": 0.2620689655172414,\n \"acc_norm_stderr\": 0.03664666337225256\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.25396825396825395,\n \"acc_stderr\": 0.02241804289111394,\n \"\ acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.02241804289111394\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.14285714285714285,\n\ \ \"acc_stderr\": 0.03129843185743811,\n \"acc_norm\": 0.14285714285714285,\n\ \ \"acc_norm_stderr\": 0.03129843185743811\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.27419354838709675,\n\ \ \"acc_stderr\": 0.025378139970885196,\n \"acc_norm\": 0.27419354838709675,\n\ \ \"acc_norm_stderr\": 0.025378139970885196\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.2955665024630542,\n \"acc_stderr\": 0.032104944337514575,\n\ \ \"acc_norm\": 0.2955665024630542,\n \"acc_norm_stderr\": 0.032104944337514575\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\"\ : 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.23030303030303031,\n \"acc_stderr\": 0.032876667586034886,\n\ \ \"acc_norm\": 0.23030303030303031,\n \"acc_norm_stderr\": 0.032876667586034886\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.3383838383838384,\n \"acc_stderr\": 0.03371124142626304,\n \"\ acc_norm\": 0.3383838383838384,\n \"acc_norm_stderr\": 0.03371124142626304\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.27979274611398963,\n \"acc_stderr\": 0.032396370467357036,\n\ \ \"acc_norm\": 0.27979274611398963,\n \"acc_norm_stderr\": 0.032396370467357036\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.26153846153846155,\n \"acc_stderr\": 0.022282141204204416,\n\ \ \"acc_norm\": 0.26153846153846155,\n \"acc_norm_stderr\": 0.022282141204204416\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.24074074074074073,\n \"acc_stderr\": 0.026067159222275798,\n \ \ \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.026067159222275798\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.02934457250063434,\n \ \ \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.02934457250063434\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2980132450331126,\n \"acc_stderr\": 0.037345356767871984,\n \"\ acc_norm\": 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.28807339449541286,\n \"acc_stderr\": 0.01941644589263603,\n \"\ acc_norm\": 0.28807339449541286,\n \"acc_norm_stderr\": 0.01941644589263603\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4027777777777778,\n \"acc_stderr\": 0.03344887382997866,\n \"\ acc_norm\": 0.4027777777777778,\n \"acc_norm_stderr\": 0.03344887382997866\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.22549019607843138,\n \"acc_stderr\": 0.02933116229425173,\n \"\ acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.02933116229425173\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.28270042194092826,\n \"acc_stderr\": 0.02931281415395594,\n \ \ \"acc_norm\": 0.28270042194092826,\n \"acc_norm_stderr\": 0.02931281415395594\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.27802690582959644,\n\ \ \"acc_stderr\": 0.03006958487449403,\n \"acc_norm\": 0.27802690582959644,\n\ \ \"acc_norm_stderr\": 0.03006958487449403\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.1984732824427481,\n \"acc_stderr\": 0.034981493854624734,\n\ \ \"acc_norm\": 0.1984732824427481,\n \"acc_norm_stderr\": 0.034981493854624734\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.36363636363636365,\n \"acc_stderr\": 0.043913262867240704,\n \"\ acc_norm\": 0.36363636363636365,\n \"acc_norm_stderr\": 0.043913262867240704\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2962962962962963,\n\ \ \"acc_stderr\": 0.04414343666854933,\n \"acc_norm\": 0.2962962962962963,\n\ \ \"acc_norm_stderr\": 0.04414343666854933\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.27607361963190186,\n \"acc_stderr\": 0.0351238528370505,\n\ \ \"acc_norm\": 0.27607361963190186,\n \"acc_norm_stderr\": 0.0351238528370505\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25892857142857145,\n\ \ \"acc_stderr\": 0.04157751539865629,\n \"acc_norm\": 0.25892857142857145,\n\ \ \"acc_norm_stderr\": 0.04157751539865629\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.2524271844660194,\n \"acc_stderr\": 0.04301250399690877,\n\ \ \"acc_norm\": 0.2524271844660194,\n \"acc_norm_stderr\": 0.04301250399690877\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 
0.2564102564102564,\n\ \ \"acc_stderr\": 0.028605953702004253,\n \"acc_norm\": 0.2564102564102564,\n\ \ \"acc_norm_stderr\": 0.028605953702004253\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2835249042145594,\n\ \ \"acc_stderr\": 0.016117318166832283,\n \"acc_norm\": 0.2835249042145594,\n\ \ \"acc_norm_stderr\": 0.016117318166832283\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.2832369942196532,\n \"acc_stderr\": 0.02425790170532337,\n\ \ \"acc_norm\": 0.2832369942196532,\n \"acc_norm_stderr\": 0.02425790170532337\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.2581699346405229,\n \"acc_stderr\": 0.025058503316958154,\n\ \ \"acc_norm\": 0.2581699346405229,\n \"acc_norm_stderr\": 0.025058503316958154\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.27009646302250806,\n\ \ \"acc_stderr\": 0.025218040373410626,\n \"acc_norm\": 0.27009646302250806,\n\ \ \"acc_norm_stderr\": 0.025218040373410626\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.023468429832451166,\n\ \ \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.023468429832451166\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2695035460992908,\n \"acc_stderr\": 0.026469036818590638,\n \ \ \"acc_norm\": 0.2695035460992908,\n \"acc_norm_stderr\": 0.026469036818590638\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.22816166883963493,\n\ \ \"acc_stderr\": 0.01071799219204788,\n \"acc_norm\": 0.22816166883963493,\n\ \ \"acc_norm_stderr\": 0.01071799219204788\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.3161764705882353,\n \"acc_stderr\": 0.028245687391462916,\n\ \ \"acc_norm\": 0.3161764705882353,\n \"acc_norm_stderr\": 0.028245687391462916\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.27124183006535946,\n \"acc_stderr\": 0.01798661530403031,\n \ \ \"acc_norm\": 0.27124183006535946,\n \"acc_norm_stderr\": 0.01798661530403031\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.35454545454545455,\n\ \ \"acc_stderr\": 0.04582004841505416,\n \"acc_norm\": 0.35454545454545455,\n\ \ \"acc_norm_stderr\": 0.04582004841505416\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.3020408163265306,\n \"acc_stderr\": 0.02939360931987981,\n\ \ \"acc_norm\": 0.3020408163265306,\n \"acc_norm_stderr\": 0.02939360931987981\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24875621890547264,\n\ \ \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.24875621890547264,\n\ \ \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.04093601807403325,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.04093601807403325\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2469879518072289,\n\ \ \"acc_stderr\": 0.03357351982064536,\n \"acc_norm\": 0.2469879518072289,\n\ \ \"acc_norm_stderr\": 0.03357351982064536\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 
0.2807017543859649,\n \"acc_stderr\": 0.034462962170884265,\n\ \ \"acc_norm\": 0.2807017543859649,\n \"acc_norm_stderr\": 0.034462962170884265\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.01450904517148729,\n \"mc2\": 0.35027279444600373,\n\ \ \"mc2_stderr\": 0.01335009503768823\n }\n}\n```" repo_url: https://huggingface.co/klosax/open_llama_3b_350bt_preview leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|arc:challenge|25_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hellaswag|10_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:25:13.548749.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:25:13.548749.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:25:13.548749.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:25:13.548749.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:25:13.548749.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:25:13.548749.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T10_25_13.548749 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T10:25:13.548749.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T10:25:13.548749.parquet' - config_name: results data_files: - split: 2023_07_24T10_25_13.548749 path: - results_2023-07-24T10:25:13.548749.parquet - split: latest path: - results_2023-07-24T10:25:13.548749.parquet --- # Dataset Card for Evaluation run of klosax/open_llama_3b_350bt_preview ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/klosax/open_llama_3b_350bt_preview - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [klosax/open_llama_3b_350bt_preview](https://huggingface.co/klosax/open_llama_3b_350bt_preview) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
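Before picking a task, the available configurations and splits can also be enumerated programmatically. The short sketch below is illustrative only: it assumes nothing beyond the `datasets` helpers `get_dataset_config_names` / `get_dataset_split_names` and the repository id used in the loading snippet that follows.

```python
from datasets import get_dataset_config_names, get_dataset_split_names

repo_id = "open-llm-leaderboard/details_klosax__open_llama_3b_350bt_preview"

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo_id)
print(len(configs), configs[:3])

# Each configuration exposes one split per run timestamp and a "latest" alias.
print(get_dataset_split_names(repo_id, configs[0]))
```

Any of the printed configuration names can be substituted into the loading call shown next.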
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_klosax__open_llama_3b_350bt_preview", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-24T10:25:13.548749](https://huggingface.co/datasets/open-llm-leaderboard/details_klosax__open_llama_3b_350bt_preview/blob/main/results_2023-07-24T10%3A25%3A13.548749.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.27221888756913304, "acc_stderr": 0.03212813724268037, "acc_norm": 0.2751900409341744, "acc_norm_stderr": 0.032129932454657235, "mc1": 0.22031823745410037, "mc1_stderr": 0.01450904517148729, "mc2": 0.35027279444600373, "mc2_stderr": 0.01335009503768823 }, "harness|arc:challenge|25": { "acc": 0.34215017064846415, "acc_stderr": 0.01386415215917728, "acc_norm": 0.3651877133105802, "acc_norm_stderr": 0.014070265519268802 }, "harness|hellaswag|10": { "acc": 0.4563831905994822, "acc_stderr": 0.004970759774676886, "acc_norm": 0.6086436964748058, "acc_norm_stderr": 0.004870563921220623 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.23026315789473684, "acc_stderr": 0.03426059424403165, "acc_norm": 0.23026315789473684, "acc_norm_stderr": 0.03426059424403165 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.30566037735849055, "acc_stderr": 0.028353298073322666, "acc_norm": 0.30566037735849055, "acc_norm_stderr": 0.028353298073322666 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.22916666666666666, "acc_stderr": 0.03514697467862388, "acc_norm": 0.22916666666666666, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.14705882352941177, "acc_stderr": 0.03524068951567447, "acc_norm": 0.14705882352941177, "acc_norm_stderr": 0.03524068951567447 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.33191489361702126, "acc_stderr": 0.03078373675774565, "acc_norm": 0.33191489361702126, "acc_norm_stderr": 0.03078373675774565 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.2719298245614035, "acc_stderr": 0.041857744240220554, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.041857744240220554 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2620689655172414, "acc_stderr": 0.03664666337225256, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.03664666337225256 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25396825396825395, "acc_stderr": 0.02241804289111394, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.02241804289111394 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.14285714285714285, "acc_stderr": 0.03129843185743811, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.03129843185743811 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.27419354838709675, "acc_stderr": 0.025378139970885196, "acc_norm": 0.27419354838709675, "acc_norm_stderr": 0.025378139970885196 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2955665024630542, "acc_stderr": 0.032104944337514575, "acc_norm": 0.2955665024630542, "acc_norm_stderr": 0.032104944337514575 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.23030303030303031, "acc_stderr": 0.032876667586034886, "acc_norm": 0.23030303030303031, "acc_norm_stderr": 0.032876667586034886 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3383838383838384, "acc_stderr": 0.03371124142626304, "acc_norm": 0.3383838383838384, "acc_norm_stderr": 0.03371124142626304 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.27979274611398963, "acc_stderr": 0.032396370467357036, "acc_norm": 0.27979274611398963, "acc_norm_stderr": 0.032396370467357036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.26153846153846155, "acc_stderr": 0.022282141204204416, "acc_norm": 0.26153846153846155, "acc_norm_stderr": 0.022282141204204416 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.026067159222275798, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.026067159222275798 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2857142857142857, "acc_stderr": 0.02934457250063434, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.02934457250063434 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.28807339449541286, "acc_stderr": 0.01941644589263603, "acc_norm": 0.28807339449541286, "acc_norm_stderr": 0.01941644589263603 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4027777777777778, "acc_stderr": 0.03344887382997866, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.03344887382997866 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.22549019607843138, "acc_stderr": 0.02933116229425173, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.02933116229425173 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.28270042194092826, "acc_stderr": 0.02931281415395594, "acc_norm": 0.28270042194092826, "acc_norm_stderr": 0.02931281415395594 }, "harness|hendrycksTest-human_aging|5": { "acc": 
0.27802690582959644, "acc_stderr": 0.03006958487449403, "acc_norm": 0.27802690582959644, "acc_norm_stderr": 0.03006958487449403 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.1984732824427481, "acc_stderr": 0.034981493854624734, "acc_norm": 0.1984732824427481, "acc_norm_stderr": 0.034981493854624734 }, "harness|hendrycksTest-international_law|5": { "acc": 0.36363636363636365, "acc_stderr": 0.043913262867240704, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.043913262867240704 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.04414343666854933, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.04414343666854933 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.27607361963190186, "acc_stderr": 0.0351238528370505, "acc_norm": 0.27607361963190186, "acc_norm_stderr": 0.0351238528370505 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.04157751539865629, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.04157751539865629 }, "harness|hendrycksTest-management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690877, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690877 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2564102564102564, "acc_stderr": 0.028605953702004253, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.028605953702004253 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2835249042145594, "acc_stderr": 0.016117318166832283, "acc_norm": 0.2835249042145594, "acc_norm_stderr": 0.016117318166832283 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2832369942196532, "acc_stderr": 0.02425790170532337, "acc_norm": 0.2832369942196532, "acc_norm_stderr": 0.02425790170532337 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2581699346405229, "acc_stderr": 0.025058503316958154, "acc_norm": 0.2581699346405229, "acc_norm_stderr": 0.025058503316958154 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.27009646302250806, "acc_stderr": 0.025218040373410626, "acc_norm": 0.27009646302250806, "acc_norm_stderr": 0.025218040373410626 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.23148148148148148, "acc_stderr": 0.023468429832451166, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.023468429832451166 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590638, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590638 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.22816166883963493, "acc_stderr": 0.01071799219204788, "acc_norm": 0.22816166883963493, "acc_norm_stderr": 0.01071799219204788 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3161764705882353, "acc_stderr": 0.028245687391462916, "acc_norm": 0.3161764705882353, "acc_norm_stderr": 0.028245687391462916 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.27124183006535946, "acc_stderr": 0.01798661530403031, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.01798661530403031 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.35454545454545455, "acc_stderr": 0.04582004841505416, "acc_norm": 0.35454545454545455, "acc_norm_stderr": 
0.04582004841505416 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3020408163265306, "acc_stderr": 0.02939360931987981, "acc_norm": 0.3020408163265306, "acc_norm_stderr": 0.02939360931987981 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24875621890547264, "acc_stderr": 0.030567675938916714, "acc_norm": 0.24875621890547264, "acc_norm_stderr": 0.030567675938916714 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.21, "acc_stderr": 0.04093601807403325, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403325 }, "harness|hendrycksTest-virology|5": { "acc": 0.2469879518072289, "acc_stderr": 0.03357351982064536, "acc_norm": 0.2469879518072289, "acc_norm_stderr": 0.03357351982064536 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2807017543859649, "acc_stderr": 0.034462962170884265, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.034462962170884265 }, "harness|truthfulqa:mc|0": { "mc1": 0.22031823745410037, "mc1_stderr": 0.01450904517148729, "mc2": 0.35027279444600373, "mc2_stderr": 0.01335009503768823 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
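As a complementary, illustrative sketch of the layout described in this card: the aggregated metrics shown under "Latest results" live in the `results` configuration, while each task configuration holds the per-sample records behind them. The configuration and split names below are taken from this card; the parquet column layout is not documented here, so the sketch only inspects whatever columns are present.

```python
from datasets import load_dataset

repo_id = "open-llm-leaderboard/details_klosax__open_llama_3b_350bt_preview"

# Aggregated metrics for the most recent run (the "results" configuration, "latest" split).
results = load_dataset(repo_id, "results", split="latest")
print(results.column_names)

# Per-sample details for a single task, also aliased to the latest run.
details = load_dataset(repo_id, "harness_hendrycksTest_abstract_algebra_5", split="latest")
print(details.num_rows, details.column_names)
```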
[ -0.7228883504867554, -0.830833911895752, 0.3179701268672943, 0.23814797401428223, -0.16338394582271576, 0.004764038138091564, 0.03349898010492325, -0.2547321915626526, 0.5857554078102112, -0.06633922457695007, -0.47760000824928284, -0.7090312838554382, -0.47864317893981934, 0.2706993818283081, -0.04292461648583412, 0.8521264791488647, -0.23516635596752167, -0.13934126496315002, 0.07946447283029556, -0.05072940140962601, -0.2532193064689636, -0.3183518052101135, -0.43847450613975525, -0.3810878396034241, 0.2182796448469162, 0.46484726667404175, 0.46629971265792847, 0.8052139282226562, 0.6690099835395813, 0.2770598828792572, -0.33321863412857056, -0.0193064883351326, -0.1745624840259552, -0.29926684498786926, 0.42654940485954285, -0.3637590706348419, -0.8303859233856201, 0.2882731854915619, 0.7668927311897278, 0.6696351170539856, -0.06817921996116638, 0.3071839511394501, 0.039828184992074966, 0.5745846033096313, -0.38053953647613525, 0.05465971305966377, -0.2686132788658142, 0.22453802824020386, -0.1934388279914856, -0.2685838043689728, -0.31619101762771606, -0.2829933166503906, -0.16755230724811554, -0.871845543384552, 0.2467675358057022, 0.30904698371887207, 1.5862176418304443, -0.12795619666576385, -0.259523868560791, 0.0709834098815918, -0.1044408455491066, 0.9844426512718201, -0.8540088534355164, 0.3783172070980072, 0.8035372495651245, 0.11324117332696915, -0.1936681866645813, -0.5889161229133606, -0.6432284116744995, 0.04576487839221954, -0.366814523935318, 0.348662406206131, -0.08557793498039246, -0.1980080008506775, 0.3615407347679138, 0.6724265217781067, -0.6683410406112671, 0.20054540038108826, -0.6660575866699219, -0.18166811764240265, 1.0468645095825195, 0.35012415051460266, 0.01214168593287468, -0.33754515647888184, -0.6803547143936157, -0.6836809515953064, -0.42894187569618225, 0.26962539553642273, 0.46284329891204834, 0.34361374378204346, -0.3943561017513275, 0.7141936421394348, -0.43887951970100403, 0.5591516494750977, 0.40408748388290405, -0.027452075853943825, 0.9365503787994385, -0.6584871411323547, -0.5230002999305725, -0.05452444776892662, 1.088296890258789, 0.5730528831481934, 0.023709243163466454, 0.23130694031715393, 0.04703434556722641, -0.08224861323833466, 0.004219754599034786, -0.8752396106719971, -0.2942817807197571, 0.18723712861537933, -0.41164129972457886, -0.5015423893928528, 0.28903332352638245, -0.8886741399765015, 0.14135390520095825, -0.025986047461628914, 0.369878888130188, -0.5109977722167969, -0.1111396923661232, 0.25388360023498535, -0.3824838101863861, 0.7961692214012146, -0.17732080817222595, -0.7696142792701721, 0.3987855911254883, 0.49098435044288635, 0.7666603326797485, -0.08901571482419968, -0.4508945345878601, -0.10767427086830139, -0.11998222023248672, -0.2928922772407532, 0.5490621328353882, -0.29292139410972595, -0.43931111693382263, -0.29809486865997314, 0.2952168881893158, -0.23771846294403076, -0.32420557737350464, 0.7281703948974609, -0.24920213222503662, 0.19717712700366974, -0.45770758390426636, -0.631428599357605, 0.09694184362888336, 0.3779561519622803, -0.4213424324989319, 1.3162999153137207, 0.26307687163352966, -0.7969990968704224, 0.4493793249130249, -0.5900209546089172, -0.14351020753383636, -0.039818067103624344, -0.042496148496866226, -0.806753396987915, -0.29370802640914917, 0.19923561811447144, 0.4190463125705719, -0.13943973183631897, -0.12871511280536652, -0.40616777539253235, -0.3456880748271942, 0.32124000787734985, -0.12121358513832092, 1.1920032501220703, -0.008728149347007275, -0.7247603535652161, 
-0.1260131448507309, -1.2478598356246948, 0.29974067211151123, 0.2239312380552292, -0.402618408203125, -0.14533868432044983, -0.4865207374095917, -0.04410477355122566, 0.14888232946395874, 0.28031831979751587, -0.8083896636962891, 0.31305795907974243, -0.35078302025794983, 0.12529896199703217, 1.279517650604248, 0.01637181080877781, 0.16815191507339478, -0.5492712259292603, 0.5527722239494324, 0.20547771453857422, 0.1674143373966217, 0.38012662529945374, -0.6300663352012634, -0.795109748840332, -0.5061302185058594, -0.031557466834783554, 0.5859805941581726, -0.2075163573026657, 1.0926507711410522, 0.06975214183330536, -0.9401121735572815, -0.45718494057655334, -0.1075640618801117, 0.5146273374557495, 0.8004429936408997, 0.5892853736877441, -0.046116407960653305, -0.6234506964683533, -1.1390697956085205, -0.26858359575271606, -0.17237229645252228, 0.13841260969638824, 0.23781631886959076, 1.0557200908660889, -0.27205735445022583, 0.6174187064170837, -1.0971219539642334, -0.18117713928222656, 0.18473976850509644, -0.05852600932121277, 0.8129383325576782, 0.7096368670463562, 0.6024502515792847, -0.658004641532898, -0.5256395936012268, 0.20035381615161896, -0.8605707883834839, -0.12814629077911377, 0.15956845879554749, -0.30447882413864136, 0.14676207304000854, 0.12296433746814728, -0.6779850125312805, 0.5174169540405273, 0.23550423979759216, -1.0637075901031494, 1.0792880058288574, -0.3091020882129669, 0.5536607503890991, -1.0004825592041016, 0.20027734339237213, -0.07108098268508911, 0.06595306843519211, -0.49279677867889404, 0.019713111221790314, 0.12181868404150009, 0.4643628001213074, -0.4867893159389496, 0.7927519083023071, -0.6820924282073975, -0.10928744822740555, 0.4267893135547638, 0.11192870140075684, -0.10059761255979538, 0.39671096205711365, -0.252244234085083, 0.7904894351959229, 0.742291271686554, -0.49250680208206177, 0.5041588544845581, 0.4332810640335083, -0.23502416908740997, 0.7068532705307007, -0.4620518088340759, -0.2783277928829193, 0.28057947754859924, 0.008635596372187138, -0.8278815150260925, -0.5313451290130615, 0.07973578572273254, -0.592424213886261, -0.1283324956893921, 0.43348079919815063, -0.29125016927719116, -0.8311638236045837, -0.9269998073577881, 0.3491755723953247, 0.6842287182807922, -0.4556281864643097, -0.17568081617355347, 0.04270853102207184, 0.10271824896335602, -0.8492780923843384, -0.7911126017570496, -0.49218955636024475, -0.23204639554023743, -0.6642516851425171, 0.32531559467315674, -0.2996703088283539, -0.2676595151424408, -0.08574623614549637, -0.2345341145992279, -0.3116952180862427, 0.027889830991625786, 0.13885024189949036, 0.6588765978813171, -0.3963969647884369, -0.3071631193161011, -0.255033016204834, -0.14500993490219116, 0.20789799094200134, -0.07140422612428665, 0.40423446893692017, -0.4940996468067169, -0.4117346405982971, -0.4171440005302429, -0.013376348651945591, 0.6921891570091248, -0.050358083099126816, 0.7004061937332153, 0.42468705773353577, -0.2994367480278015, 0.048467304557561874, -0.29832208156585693, -0.23034879565238953, -0.578153669834137, 0.29258525371551514, -0.5027481913566589, -1.0614269971847534, 0.7885987758636475, 0.5372922420501709, 0.05403801053762436, 1.1059459447860718, 0.6543005704879761, -0.28720006346702576, 1.0318433046340942, 0.0840618684887886, 0.3224681317806244, 0.36825457215309143, -0.6737306118011475, 0.10066912323236465, -0.9299197793006897, -0.3472694456577301, -0.5976769924163818, -0.49672847986221313, -0.7262266874313354, -0.06961768120527267, 0.28012222051620483, 0.1515878438949585, 
-0.7056799530982971, 0.6189609169960022, -0.8377586007118225, 0.5904778242111206, 0.581059992313385, 0.276200532913208, 0.14887340366840363, -0.16817273199558258, -0.4071596562862396, -0.09061529487371445, -0.43635135889053345, -0.2681810259819031, 1.2354485988616943, 0.2795579135417938, 0.7060247659683228, 0.07146329432725906, 0.8900829553604126, 0.09444227069616318, -0.04295569658279419, -0.5769246816635132, 0.6485879421234131, 0.1801825910806656, -0.7664759159088135, -0.38059502840042114, -0.4891095757484436, -1.1229795217514038, 0.3978710472583771, -0.12651997804641724, -0.8854652643203735, 0.1502641886472702, 0.011725349351763725, -0.20152518153190613, 0.4831729531288147, -0.5504356026649475, 0.7890061736106873, -0.12135839462280273, -0.45763272047042847, 0.12256307899951935, -0.8359136581420898, 0.475810706615448, 0.1599845141172409, 0.2710181772708893, 0.008146234788000584, 0.23519489169120789, 1.1682827472686768, -0.775739312171936, 0.4444478154182434, 0.09745179861783981, -0.0004719192802440375, 0.3178512454032898, -0.165859654545784, 0.5040310025215149, 0.06640098243951797, -0.01838383823633194, -0.08495732396841049, 0.2692375183105469, -0.8737043738365173, -0.031228581443428993, 0.9064058065414429, -0.9905838370323181, -0.6083062291145325, -0.8884535431861877, -0.5724603533744812, 0.07325877249240875, 0.5839042663574219, 0.3448641002178192, 0.4636003077030182, 0.0209873728454113, 0.4622756540775299, 0.777556836605072, -0.142127126455307, 0.6141994595527649, 0.2622086703777313, 0.0715305358171463, -0.7076491117477417, 0.8441084027290344, 0.06701774895191193, 0.37224873900413513, 0.252338171005249, 0.3999702036380768, -0.5303683876991272, -0.1958637833595276, -0.21648193895816803, 0.5114681720733643, -0.6537737846374512, -0.3132273852825165, -0.3979981243610382, -0.34954163432121277, -0.7602435946464539, -0.665513277053833, -0.2868698835372925, -0.4928525388240814, -0.5007627010345459, -0.5050411224365234, 0.5934276580810547, 0.49460074305534363, -0.39224278926849365, 0.09698346257209778, -0.4757172763347626, 0.2653266489505768, 0.3071242868900299, 0.5534436106681824, -0.3784242570400238, -0.5113344192504883, 0.038832731544971466, -0.12828201055526733, -0.6364109516143799, -0.9658401608467102, 0.35579806566238403, -0.052123621106147766, 0.5199999809265137, 0.5787690877914429, 0.04330868273973465, 0.8524218797683716, -0.1991586834192276, 1.0839639902114868, 0.37027162313461304, -0.7791595458984375, 0.7166920900344849, -0.3301621675491333, 0.1731177568435669, 0.6248299479484558, 0.2018565684556961, -0.18672342598438263, -0.7000192999839783, -1.3166614770889282, -0.8179207444190979, 0.6904382705688477, 0.41839170455932617, -0.29223984479904175, 0.02328862063586712, 0.13769064843654633, -0.24722693860530853, -0.16832125186920166, -0.7250856757164001, -0.8560022115707397, -0.1592012196779251, -0.4737092852592468, 0.0853164941072464, 0.03264143690466881, -0.40460965037345886, -0.7938237190246582, 0.9316779971122742, -0.03262793645262718, 0.5752214789390564, 0.4607485830783844, 0.09197913855314255, 0.05311973765492439, 0.4640534818172455, 0.9568173289299011, 0.7225692868232727, -0.46709108352661133, 0.4047721028327942, 0.4530717134475708, -1.0583369731903076, 0.4918287396430969, 0.3039496839046478, -0.03766098991036415, -0.04381585493683815, 0.49110502004623413, 0.4247126281261444, 0.036458566784858704, -0.2238907366991043, 0.6082247495651245, 0.00023094916832633317, -0.5829900503158569, -0.3752778172492981, 0.08209003508090973, -0.12032891064882278, 0.016101956367492676, 
0.3889898359775543, -0.1394156515598297, -0.04813766106963158, -0.4787796139717102, 0.45030662417411804, 0.3442833125591278, -0.4292125701904297, -0.16344884037971497, 0.733581006526947, -0.2019297182559967, -0.15283085405826569, 0.3111404478549957, -0.16522181034088135, -0.6252186894416809, 1.1596674919128418, 0.5980294346809387, 0.6662766337394714, -0.24642087519168854, -0.0823587030172348, 0.9146968722343445, 0.3770485520362854, -0.03811008855700493, 0.5305518507957458, 0.2898232936859131, -0.26442238688468933, 0.17658907175064087, -0.8940706849098206, -0.08392761647701263, 0.15915019810199738, -0.8391612768173218, 0.33275678753852844, -0.4824903905391693, -0.18059904873371124, 0.00266822287812829, 0.41804173588752747, -0.4862479269504547, 0.5297198295593262, -0.3852803707122803, 1.2137389183044434, -1.0124009847640991, 0.6887843012809753, 0.7401012778282166, -0.5502682328224182, -1.0394189357757568, -0.5339041948318481, -0.012584752403199673, -0.8520069718360901, 0.5995878577232361, -0.018295856192708015, 0.17822235822677612, -0.09247900545597076, -0.7322907447814941, -0.9708056449890137, 1.4478181600570679, -0.018455225974321365, -0.4145669937133789, 0.2520718276500702, -0.02802235633134842, 0.4525027871131897, 0.12663446366786957, 0.5977253913879395, 0.7748419046401978, 0.8270903825759888, -0.06873001158237457, -0.7726669907569885, 0.3371836245059967, -0.5129384398460388, -0.32950982451438904, 0.4583762586116791, -0.9487183690071106, 1.1934367418289185, -0.008100786246359348, 0.2240033745765686, -0.1597910076379776, 0.6775144338607788, 0.8435311913490295, 0.33001604676246643, 0.3917256295681, 0.9129227995872498, 0.8660570979118347, -0.48444750905036926, 1.0288959741592407, -0.21881404519081116, 0.8454575538635254, 0.6904826164245605, 0.17239992320537567, 0.7594231367111206, 0.6859034299850464, -0.5513246655464172, 0.5692442059516907, 0.8452369570732117, -0.3125196099281311, 0.420198529958725, 0.26922523975372314, -0.11667013168334961, -0.1310911923646927, 0.4105144739151001, -0.9061686992645264, 0.17348049581050873, 0.07117189466953278, -0.3534943759441376, 0.05549709498882294, -0.43270644545555115, 0.35205358266830444, -0.08986171334981918, -0.03556692972779274, 0.35026460886001587, 0.04498003423213959, -0.4778577387332916, 0.9748746156692505, -0.1640474945306778, 0.729981541633606, -0.5026916265487671, -0.08393973112106323, -0.3666099011898041, 0.5827139019966125, -0.44803109765052795, -1.0840734243392944, 0.16677385568618774, 0.08037136495113373, -0.14159221947193146, -0.20296545326709747, 0.6709179878234863, -0.14276868104934692, -0.7795538306236267, 0.1593308299779892, 0.062137018889188766, 0.1155131608247757, 0.5618293285369873, -0.6386542916297913, -0.3168904185295105, -0.0442674420773983, -0.574957549571991, 0.13456663489341736, 0.2872351408004761, 0.2660363018512726, 0.5661129951477051, 0.6774712204933167, 0.13097214698791504, 0.3779257535934448, -0.5410529971122742, 0.8568121790885925, -1.0277249813079834, -0.708328366279602, -0.9240571856498718, 0.4368002712726593, -0.31822100281715393, -0.8760014772415161, 0.9918578863143921, 1.0488674640655518, 0.8961426615715027, -0.005412714555859566, 0.6056941747665405, -0.38043463230133057, 0.2530307173728943, -0.3805112838745117, 0.9230343699455261, -0.8651906251907349, -0.23531955480575562, -0.28774815797805786, -0.695178210735321, -0.39947766065597534, 0.8145677447319031, -0.18012800812721252, 0.007288915570825338, 1.062229871749878, 0.6782097220420837, -0.0762336328625679, 0.06238475814461708, -0.08075586706399918, 
0.5848485231399536, 0.3958182632923126, 0.9994879364967346, 0.6556973457336426, -0.7699660658836365, 0.38040459156036377, -0.5212653875350952, -0.4150725305080414, -0.416789710521698, -0.49921077489852905, -0.8386296033859253, -0.49556103348731995, -0.19750571250915527, -0.6269165277481079, -0.14582370221614838, 0.9805871248245239, 0.43113887310028076, -0.8987160921096802, -0.41373416781425476, -0.11336638033390045, 0.15836112201213837, -0.6022099256515503, -0.4047708511352539, 0.7368841767311096, -0.09027744829654694, -0.5521833896636963, 0.22233928740024567, -0.15522396564483643, 0.20231713354587555, 0.13379384577274323, -0.4051603674888611, -0.7105662822723389, 0.005394060164690018, 0.45522695779800415, 0.34359413385391235, -0.7128283977508545, -0.7513788342475891, 0.32302048802375793, -0.5086020231246948, 0.44434934854507446, -0.07533164322376251, -0.5470083355903625, 0.03474326431751251, 0.6872280836105347, 0.4542481303215027, 0.6762793660163879, -0.05745890364050865, 0.07565394788980484, -0.6832141876220703, 0.1819617599248886, 0.011617974378168583, 0.32241711020469666, -0.04436737671494484, -0.33594515919685364, 0.7940024733543396, 0.6551986336708069, -0.5544012784957886, -1.1319941282272339, -0.403437077999115, -1.454404354095459, -0.019682079553604126, 1.1361865997314453, 0.03382134065032005, -0.4902902841567993, 0.25542154908180237, -0.1559443473815918, 0.19664551317691803, -0.3268088400363922, 0.7771408557891846, 0.8155487179756165, -0.39378952980041504, 0.14715977013111115, -0.6435824036598206, 0.32869166135787964, 0.524313747882843, -1.1889070272445679, -0.10303080081939697, 0.24386024475097656, 0.318876713514328, 0.37453994154930115, 0.6533094048500061, -0.07442627102136612, 0.27566108107566833, 0.23270685970783234, 0.06127769872546196, 0.0021314334589987993, 0.05731702223420143, -0.20136170089244843, 0.11468886584043503, -0.26387858390808105, -0.46194639801979065 ]
open-llm-leaderboard/details_xDAN-AI__xDAN_13b_l2_lora
open-llm-leaderboard
2023-08-27T12:34:38Z
201
0
[ "region:us" ]
null
2023-08-18T11:31:32Z
--- pretty_name: Evaluation run of xDAN-AI/xDAN_13b_l2_lora dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [xDAN-AI/xDAN_13b_l2_lora](https://huggingface.co/xDAN-AI/xDAN_13b_l2_lora) on\ \ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xDAN-AI__xDAN_13b_l2_lora\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-26T14:52:48.502405](https://huggingface.co/datasets/open-llm-leaderboard/details_xDAN-AI__xDAN_13b_l2_lora/blob/main/results_2023-07-26T14%3A52%3A48.502405.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5614989942866122,\n\ \ \"acc_stderr\": 0.034331003794690465,\n \"acc_norm\": 0.5656785190124449,\n\ \ \"acc_norm_stderr\": 0.03430930050159532,\n \"mc1\": 0.31946144430844553,\n\ \ \"mc1_stderr\": 0.016322644182960498,\n \"mc2\": 0.44746680649420667,\n\ \ \"mc2_stderr\": 0.01496374462169886\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5691126279863481,\n \"acc_stderr\": 0.01447113339264247,\n\ \ \"acc_norm\": 0.6100682593856656,\n \"acc_norm_stderr\": 0.014252959848892889\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6207926707827126,\n\ \ \"acc_stderr\": 0.004841981973515282,\n \"acc_norm\": 0.8264289982075284,\n\ \ \"acc_norm_stderr\": 0.0037796612246514746\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n\ \ \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n\ \ \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296564,\n\ \ \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296564\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n\ \ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6226415094339622,\n \"acc_stderr\": 0.029832808114796005,\n\ \ \"acc_norm\": 0.6226415094339622,\n \"acc_norm_stderr\": 0.029832808114796005\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6180555555555556,\n\ \ \"acc_stderr\": 0.040629907841466674,\n \"acc_norm\": 0.6180555555555556,\n\ \ \"acc_norm_stderr\": 0.040629907841466674\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n\ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4913294797687861,\n\ \ \"acc_stderr\": 0.03811890988940412,\n \"acc_norm\": 0.4913294797687861,\n\ \ \"acc_norm_stderr\": 0.03811890988940412\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.043898699568087764,\n\ \ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.043898699568087764\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n\ \ \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3508771929824561,\n\ \ \"acc_stderr\": 0.044895393502707,\n \"acc_norm\": 0.3508771929824561,\n\ \ \"acc_norm_stderr\": 0.044895393502707\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n\ \ \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.30687830687830686,\n \"acc_stderr\": 0.023752928712112143,\n \"\ acc_norm\": 0.30687830687830686,\n \"acc_norm_stderr\": 0.023752928712112143\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n\ \ \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n\ \ \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6612903225806451,\n\ \ \"acc_stderr\": 0.026923446059302844,\n \"acc_norm\": 0.6612903225806451,\n\ \ \"acc_norm_stderr\": 0.026923446059302844\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.03510766597959217,\n\ \ \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.03510766597959217\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n\ \ \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.0364620496325381,\n\ \ \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.0364620496325381\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7171717171717171,\n \"acc_stderr\": 0.032087795587867514,\n \"\ acc_norm\": 0.7171717171717171,\n \"acc_norm_stderr\": 0.032087795587867514\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 
0.7927461139896373,\n \"acc_stderr\": 0.02925282329180363,\n\ \ \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.02925282329180363\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.49743589743589745,\n \"acc_stderr\": 0.025350672979412195,\n\ \ \"acc_norm\": 0.49743589743589745,\n \"acc_norm_stderr\": 0.025350672979412195\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2851851851851852,\n \"acc_stderr\": 0.027528599210340496,\n \ \ \"acc_norm\": 0.2851851851851852,\n \"acc_norm_stderr\": 0.027528599210340496\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5840336134453782,\n \"acc_stderr\": 0.032016501007396114,\n\ \ \"acc_norm\": 0.5840336134453782,\n \"acc_norm_stderr\": 0.032016501007396114\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"\ acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7559633027522936,\n \"acc_stderr\": 0.018415286351416416,\n \"\ acc_norm\": 0.7559633027522936,\n \"acc_norm_stderr\": 0.018415286351416416\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4212962962962963,\n \"acc_stderr\": 0.03367462138896079,\n \"\ acc_norm\": 0.4212962962962963,\n \"acc_norm_stderr\": 0.03367462138896079\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7745098039215687,\n \"acc_stderr\": 0.029331162294251735,\n \"\ acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.029331162294251735\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7468354430379747,\n \"acc_stderr\": 0.0283046579430353,\n \ \ \"acc_norm\": 0.7468354430379747,\n \"acc_norm_stderr\": 0.0283046579430353\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n\ \ \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n\ \ \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6030534351145038,\n \"acc_stderr\": 0.04291135671009224,\n\ \ \"acc_norm\": 0.6030534351145038,\n \"acc_norm_stderr\": 0.04291135671009224\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.71900826446281,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\"\ : 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n\ \ \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n\ \ \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7177914110429447,\n \"acc_stderr\": 0.03536117886664743,\n\ \ \"acc_norm\": 0.7177914110429447,\n \"acc_norm_stderr\": 0.03536117886664743\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n\ \ \"acc_stderr\": 0.044642857142857144,\n \"acc_norm\": 0.33035714285714285,\n\ \ \"acc_norm_stderr\": 0.044642857142857144\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n\ \ \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7905982905982906,\n\ \ \"acc_stderr\": 0.026655699653922737,\n \"acc_norm\": 0.7905982905982906,\n\ \ 
\"acc_norm_stderr\": 0.026655699653922737\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \ \ \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488583\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7675606641123882,\n\ \ \"acc_stderr\": 0.015104550008905723,\n \"acc_norm\": 0.7675606641123882,\n\ \ \"acc_norm_stderr\": 0.015104550008905723\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.025722802200895806,\n\ \ \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.025722802200895806\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.44692737430167595,\n\ \ \"acc_stderr\": 0.016628030039647614,\n \"acc_norm\": 0.44692737430167595,\n\ \ \"acc_norm_stderr\": 0.016628030039647614\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6274509803921569,\n \"acc_stderr\": 0.027684181883302895,\n\ \ \"acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.027684181883302895\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6302250803858521,\n\ \ \"acc_stderr\": 0.02741799670563099,\n \"acc_norm\": 0.6302250803858521,\n\ \ \"acc_norm_stderr\": 0.02741799670563099\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6234567901234568,\n \"acc_stderr\": 0.02695934451874778,\n\ \ \"acc_norm\": 0.6234567901234568,\n \"acc_norm_stderr\": 0.02695934451874778\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.41843971631205673,\n \"acc_stderr\": 0.02942799403941999,\n \ \ \"acc_norm\": 0.41843971631205673,\n \"acc_norm_stderr\": 0.02942799403941999\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.40352020860495436,\n\ \ \"acc_stderr\": 0.012530241301193182,\n \"acc_norm\": 0.40352020860495436,\n\ \ \"acc_norm_stderr\": 0.012530241301193182\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.49264705882352944,\n \"acc_stderr\": 0.030369552523902173,\n\ \ \"acc_norm\": 0.49264705882352944,\n \"acc_norm_stderr\": 0.030369552523902173\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5522875816993464,\n \"acc_stderr\": 0.02011692534742242,\n \ \ \"acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.02011692534742242\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6090909090909091,\n\ \ \"acc_stderr\": 0.04673752333670239,\n \"acc_norm\": 0.6090909090909091,\n\ \ \"acc_norm_stderr\": 0.04673752333670239\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6571428571428571,\n \"acc_stderr\": 0.030387262919547728,\n\ \ \"acc_norm\": 0.6571428571428571,\n \"acc_norm_stderr\": 0.030387262919547728\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7412935323383084,\n\ \ \"acc_stderr\": 0.03096590312357302,\n \"acc_norm\": 0.7412935323383084,\n\ \ \"acc_norm_stderr\": 0.03096590312357302\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \ \ \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n\ \ \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.45180722891566266,\n\ \ \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n\ \ \"acc_norm\": 0.7660818713450293,\n 
\"acc_norm_stderr\": 0.03246721765117826\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.31946144430844553,\n\ \ \"mc1_stderr\": 0.016322644182960498,\n \"mc2\": 0.44746680649420667,\n\ \ \"mc2_stderr\": 0.01496374462169886\n }\n}\n```" repo_url: https://huggingface.co/xDAN-AI/xDAN_13b_l2_lora leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|arc:challenge|25_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hellaswag|10_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:52:48.502405.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:52:48.502405.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:52:48.502405.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:52:48.502405.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:52:48.502405.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:52:48.502405.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_26T14_52_48.502405 path: - '**/details_harness|truthfulqa:mc|0_2023-07-26T14:52:48.502405.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-26T14:52:48.502405.parquet' - config_name: results data_files: - split: 2023_07_26T14_52_48.502405 path: - results_2023-07-26T14:52:48.502405.parquet - split: latest path: - results_2023-07-26T14:52:48.502405.parquet --- # Dataset Card for Evaluation run of xDAN-AI/xDAN_13b_l2_lora ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/xDAN-AI/xDAN_13b_l2_lora - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [xDAN-AI/xDAN_13b_l2_lora](https://huggingface.co/xDAN-AI/xDAN_13b_l2_lora) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
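For example, the aggregated scores described above can be read directly from the "results" configuration. This is a minimal sketch added for illustration (not part of the auto-generated card), assuming the `datasets` library is installed and the repository is publicly accessible:

```python
from datasets import load_dataset

# The aggregated metrics of a run live in the "results" configuration;
# the "latest" split always points at the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_xDAN-AI__xDAN_13b_l2_lora",
    "results",
    split="latest",
)

# Inspect the first (and typically only) row of aggregated scores.
print(results[0])
```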
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xDAN-AI__xDAN_13b_l2_lora", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-26T14:52:48.502405](https://huggingface.co/datasets/open-llm-leaderboard/details_xDAN-AI__xDAN_13b_l2_lora/blob/main/results_2023-07-26T14%3A52%3A48.502405.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5614989942866122, "acc_stderr": 0.034331003794690465, "acc_norm": 0.5656785190124449, "acc_norm_stderr": 0.03430930050159532, "mc1": 0.31946144430844553, "mc1_stderr": 0.016322644182960498, "mc2": 0.44746680649420667, "mc2_stderr": 0.01496374462169886 }, "harness|arc:challenge|25": { "acc": 0.5691126279863481, "acc_stderr": 0.01447113339264247, "acc_norm": 0.6100682593856656, "acc_norm_stderr": 0.014252959848892889 }, "harness|hellaswag|10": { "acc": 0.6207926707827126, "acc_stderr": 0.004841981973515282, "acc_norm": 0.8264289982075284, "acc_norm_stderr": 0.0037796612246514746 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296564, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296564 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6226415094339622, "acc_stderr": 0.029832808114796005, "acc_norm": 0.6226415094339622, "acc_norm_stderr": 0.029832808114796005 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6180555555555556, "acc_stderr": 0.040629907841466674, "acc_norm": 0.6180555555555556, "acc_norm_stderr": 0.040629907841466674 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4913294797687861, "acc_stderr": 0.03811890988940412, "acc_norm": 0.4913294797687861, "acc_norm_stderr": 0.03811890988940412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.043898699568087764, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.043898699568087764 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224468, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 
0.044895393502707, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30687830687830686, "acc_stderr": 0.023752928712112143, "acc_norm": 0.30687830687830686, "acc_norm_stderr": 0.023752928712112143 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6612903225806451, "acc_stderr": 0.026923446059302844, "acc_norm": 0.6612903225806451, "acc_norm_stderr": 0.026923446059302844 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.46798029556650245, "acc_stderr": 0.03510766597959217, "acc_norm": 0.46798029556650245, "acc_norm_stderr": 0.03510766597959217 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.0364620496325381, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.0364620496325381 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7171717171717171, "acc_stderr": 0.032087795587867514, "acc_norm": 0.7171717171717171, "acc_norm_stderr": 0.032087795587867514 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7927461139896373, "acc_stderr": 0.02925282329180363, "acc_norm": 0.7927461139896373, "acc_norm_stderr": 0.02925282329180363 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.49743589743589745, "acc_stderr": 0.025350672979412195, "acc_norm": 0.49743589743589745, "acc_norm_stderr": 0.025350672979412195 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340496, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340496 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5840336134453782, "acc_stderr": 0.032016501007396114, "acc_norm": 0.5840336134453782, "acc_norm_stderr": 0.032016501007396114 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7559633027522936, "acc_stderr": 0.018415286351416416, "acc_norm": 0.7559633027522936, "acc_norm_stderr": 0.018415286351416416 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896079, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896079 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.029331162294251735, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.029331162294251735 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7468354430379747, "acc_stderr": 0.0283046579430353, "acc_norm": 0.7468354430379747, "acc_norm_stderr": 0.0283046579430353 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, 
"acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009224, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009224 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514512, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514512 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7177914110429447, "acc_stderr": 0.03536117886664743, "acc_norm": 0.7177914110429447, "acc_norm_stderr": 0.03536117886664743 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.044642857142857144, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.044642857142857144 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7905982905982906, "acc_stderr": 0.026655699653922737, "acc_norm": 0.7905982905982906, "acc_norm_stderr": 0.026655699653922737 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7675606641123882, "acc_stderr": 0.015104550008905723, "acc_norm": 0.7675606641123882, "acc_norm_stderr": 0.015104550008905723 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6473988439306358, "acc_stderr": 0.025722802200895806, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.025722802200895806 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.44692737430167595, "acc_stderr": 0.016628030039647614, "acc_norm": 0.44692737430167595, "acc_norm_stderr": 0.016628030039647614 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6274509803921569, "acc_stderr": 0.027684181883302895, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.027684181883302895 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6302250803858521, "acc_stderr": 0.02741799670563099, "acc_norm": 0.6302250803858521, "acc_norm_stderr": 0.02741799670563099 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6234567901234568, "acc_stderr": 0.02695934451874778, "acc_norm": 0.6234567901234568, "acc_norm_stderr": 0.02695934451874778 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.41843971631205673, "acc_stderr": 0.02942799403941999, "acc_norm": 0.41843971631205673, "acc_norm_stderr": 0.02942799403941999 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.40352020860495436, "acc_stderr": 0.012530241301193182, "acc_norm": 0.40352020860495436, "acc_norm_stderr": 0.012530241301193182 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.49264705882352944, "acc_stderr": 0.030369552523902173, "acc_norm": 0.49264705882352944, "acc_norm_stderr": 0.030369552523902173 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5522875816993464, "acc_stderr": 0.02011692534742242, "acc_norm": 0.5522875816993464, "acc_norm_stderr": 0.02011692534742242 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6090909090909091, "acc_stderr": 0.04673752333670239, "acc_norm": 0.6090909090909091, "acc_norm_stderr": 0.04673752333670239 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6571428571428571, 
"acc_stderr": 0.030387262919547728, "acc_norm": 0.6571428571428571, "acc_norm_stderr": 0.030387262919547728 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7412935323383084, "acc_stderr": 0.03096590312357302, "acc_norm": 0.7412935323383084, "acc_norm_stderr": 0.03096590312357302 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.31946144430844553, "mc1_stderr": 0.016322644182960498, "mc2": 0.44746680649420667, "mc2_stderr": 0.01496374462169886 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7393768429756165, -0.8114016056060791, 0.2598383128643036, 0.18344736099243164, -0.13759593665599823, -0.043974343687295914, 0.042275361716747284, -0.23772084712982178, 0.6240717172622681, -0.061294883489608765, -0.48625651001930237, -0.6794897317886353, -0.4385806620121002, 0.1888665407896042, -0.1032528281211853, 0.8353541493415833, -0.19328226149082184, -0.17355301976203918, 0.12727142870426178, -0.06352219730615616, -0.21499331295490265, -0.32949763536453247, -0.521997332572937, -0.3596116006374359, 0.2403394877910614, 0.4304242432117462, 0.4785161018371582, 0.8616950511932373, 0.6766778826713562, 0.3040302097797394, -0.33153319358825684, -0.030857881531119347, -0.1521015167236328, -0.31889185309410095, 0.36505526304244995, -0.3815508186817169, -0.8368033170700073, 0.31709328293800354, 0.7630084753036499, 0.6155807971954346, -0.06680140644311905, 0.3029535710811615, -0.000364326813723892, 0.6140088438987732, -0.3369360864162445, 0.005261521320790052, -0.2598923444747925, 0.2218191921710968, -0.2055796980857849, -0.24040356278419495, -0.24837827682495117, -0.2257264256477356, -0.14542953670024872, -0.8477739095687866, 0.23717834055423737, 0.3058628737926483, 1.5909230709075928, -0.11266706883907318, -0.2626602053642273, 0.08817461878061295, -0.08760011941194534, 0.9871478080749512, -0.853347659111023, 0.3587927520275116, 0.7685224413871765, 0.12092351168394089, -0.1339578479528427, -0.5817230343818665, -0.6540526151657104, 0.06406460702419281, -0.3653159439563751, 0.34521040320396423, -0.06533850729465485, -0.20150025188922882, 0.3734704554080963, 0.6713783740997314, -0.652644157409668, 0.17975571751594543, -0.6612711548805237, -0.07398490607738495, 1.0714268684387207, 0.32977521419525146, 0.06966676563024521, -0.4285846948623657, -0.7064328789710999, -0.68465656042099, -0.39286375045776367, 0.30792826414108276, 0.4321366548538208, 0.3337838649749756, -0.43482792377471924, 0.6924052834510803, -0.415630042552948, 0.5249191522598267, 0.4211812913417816, 0.03733047842979431, 0.9119268655776978, -0.6975933909416199, -0.5428710579872131, -0.049265991896390915, 1.1031296253204346, 0.5632627606391907, 0.03136903792619705, 0.2402641475200653, 0.027223188430070877, -0.07679425179958344, 0.007302317768335342, -0.8144185543060303, -0.2758650779724121, 0.18852606415748596, -0.39876025915145874, -0.5285428762435913, 0.3475284278392792, -0.8346826434135437, 0.1056104376912117, -0.017588622868061066, 0.41846132278442383, -0.4749460816383362, -0.11457056552171707, 0.22296321392059326, -0.4042544960975647, 0.8162533640861511, -0.22685419023036957, -0.7505098581314087, 0.3705293834209442, 0.49859702587127686, 0.7718946933746338, -0.10105948150157928, -0.410502552986145, -0.1330350637435913, -0.09768138080835342, -0.2906133830547333, 0.5313726663589478, -0.23747418820858002, -0.40702879428863525, -0.32126808166503906, 0.2649849057197571, -0.22933320701122284, -0.33867257833480835, 0.7782236933708191, -0.20735931396484375, 0.16131313145160675, -0.4423474073410034, -0.6121230125427246, 0.1217934787273407, 0.3888412415981293, -0.4609684348106384, 1.2862473726272583, 0.266889750957489, -0.8241388201713562, 0.4160623848438263, -0.6388118267059326, -0.1509069800376892, -0.0614725798368454, -0.08849745243787766, -0.8073482513427734, -0.2632797658443451, 0.17488639056682587, 0.4224499464035034, -0.15178358554840088, -0.14696794748306274, -0.34075891971588135, -0.34553197026252747, 0.40584632754325867, -0.15400069952011108, 1.2251269817352295, -0.030912304297089577, -0.8021653890609741, -0.09143023937940598, 
… (remainder of embedding vector omitted: machine-generated list of floating-point values with no human-readable content) ]
open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-Base-7B-v0.1
open-llm-leaderboard
2023-10-23T19:09:01Z
201
0
[ "region:us" ]
null
2023-08-18T11:51:03Z
--- pretty_name: Evaluation run of togethercomputer/RedPajama-INCITE-Base-7B-v0.1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [togethercomputer/RedPajama-INCITE-Base-7B-v0.1](https://huggingface.co/togethercomputer/RedPajama-INCITE-Base-7B-v0.1)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-Base-7B-v0.1\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-23T19:08:48.233759](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-Base-7B-v0.1/blob/main/results_2023-10-23T19-08-48.233759.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n\ \ \"em_stderr\": 0.00033145814652192694,\n \"f1\": 0.05110738255033561,\n\ \ \"f1_stderr\": 0.0012343063700893503,\n \"acc\": 0.3445825177884037,\n\ \ \"acc_stderr\": 0.008314908287260184\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.00033145814652192694,\n\ \ \"f1\": 0.05110738255033561,\n \"f1_stderr\": 0.0012343063700893503\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01592115238817286,\n \ \ \"acc_stderr\": 0.0034478192723889985\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6732438831886346,\n \"acc_stderr\": 0.013181997302131368\n\ \ }\n}\n```" repo_url: https://huggingface.co/togethercomputer/RedPajama-INCITE-Base-7B-v0.1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|arc:challenge|25_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T16:33:56.917496.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_23T19_08_48.233759 path: - '**/details_harness|drop|3_2023-10-23T19-08-48.233759.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-23T19-08-48.233759.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_23T19_08_48.233759 path: - '**/details_harness|gsm8k|5_2023-10-23T19-08-48.233759.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-23T19-08-48.233759.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hellaswag|10_2023-07-19T16:33:56.917496.parquet' - split: latest path: - 
'**/details_harness|hellaswag|10_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:33:56.917496.parquet' - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:33:56.917496.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:33:56.917496.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:33:56.917496.parquet' - 
'**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:33:56.917496.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:33:56.917496.parquet' - 
config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:33:56.917496.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:33:56.917496.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T16_33_56.917496 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T16:33:56.917496.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T16:33:56.917496.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_23T19_08_48.233759 path: - '**/details_harness|winogrande|5_2023-10-23T19-08-48.233759.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-23T19-08-48.233759.parquet' - config_name: results data_files: - split: 2023_07_19T16_33_56.917496 path: - results_2023-07-19T16:33:56.917496.parquet - split: 2023_10_23T19_08_48.233759 path: - results_2023-10-23T19-08-48.233759.parquet - split: latest path: - results_2023-10-23T19-08-48.233759.parquet --- # Dataset Card for Evaluation run of togethercomputer/RedPajama-INCITE-Base-7B-v0.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/togethercomputer/RedPajama-INCITE-Base-7B-v0.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [togethercomputer/RedPajama-INCITE-Base-7B-v0.1](https://huggingface.co/togethercomputer/RedPajama-INCITE-Base-7B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-Base-7B-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T19:08:48.233759](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-Base-7B-v0.1/blob/main/results_2023-10-23T19-08-48.233759.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.00033145814652192694, "f1": 0.05110738255033561, "f1_stderr": 0.0012343063700893503, "acc": 0.3445825177884037, "acc_stderr": 0.008314908287260184 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.00033145814652192694, "f1": 0.05110738255033561, "f1_stderr": 0.0012343063700893503 }, "harness|gsm8k|5": { "acc": 0.01592115238817286, "acc_stderr": 0.0034478192723889985 }, "harness|winogrande|5": { "acc": 0.6732438831886346, "acc_stderr": 0.013181997302131368 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
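The card above only demonstrates loading a single harness configuration. As a minimal sketch, assuming the `datasets` library and that the config and split names declared in the YAML front matter above (`results`, `latest`, `2023_10_23T19_08_48.233759`) are still current, the aggregated results and an individual timestamped run could be loaded like this:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-Base-7B-v0.1"

# List every configuration declared for this details repo (one per task, plus "results").
print(get_dataset_config_names(REPO))

# Aggregated metrics for the most recent run: the "latest" split of the "results" config.
latest_results = load_dataset(REPO, "results", split="latest")

# A specific run can be selected through its timestamped split instead;
# the split name below is copied from the YAML front matter above.
run_results = load_dataset(REPO, "results", split="2023_10_23T19_08_48.233759")

# Inspect the first (and typically only) row of aggregated results.
print(latest_results[0])
```

Each timestamped split corresponds to one evaluation run, while "latest" always points at the most recent one, mirroring the data_files mapping in the YAML above.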
[ embedding vector omitted: machine-generated list of floating-point values with no human-readable content ]
open-llm-leaderboard/details_AGI-inc__lora_moe_7b
open-llm-leaderboard
2023-08-27T12:38:05Z
201
0
[ "region:us" ]
null
2023-08-18T11:54:15Z
--- pretty_name: Evaluation run of AGI-inc/lora_moe_7b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [AGI-inc/lora_moe_7b](https://huggingface.co/AGI-inc/lora_moe_7b) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AGI-inc__lora_moe_7b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-24T11:46:25.370436](https://huggingface.co/datasets/open-llm-leaderboard/details_AGI-inc__lora_moe_7b/blob/main/results_2023-07-24T11%3A46%3A25.370436.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3624349655819883,\n\ \ \"acc_stderr\": 0.03457932037185986,\n \"acc_norm\": 0.36641755034742307,\n\ \ \"acc_norm_stderr\": 0.03456622803809125,\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.014509045171487291,\n \"mc2\": 0.3433554241758255,\n\ \ \"mc2_stderr\": 0.01319092242364727\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.47696245733788395,\n \"acc_stderr\": 0.014595873205358267,\n\ \ \"acc_norm\": 0.5093856655290102,\n \"acc_norm_stderr\": 0.014608816322065\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5754829715196176,\n\ \ \"acc_stderr\": 0.004932593348813628,\n \"acc_norm\": 0.7780322644891456,\n\ \ \"acc_norm_stderr\": 0.004147202539759587\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3851851851851852,\n\ \ \"acc_stderr\": 0.042039210401562783,\n \"acc_norm\": 0.3851851851851852,\n\ \ \"acc_norm_stderr\": 0.042039210401562783\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.03860731599316092,\n\ \ \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.03860731599316092\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.41,\n\ \ \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \ \ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.3622641509433962,\n \"acc_stderr\": 0.0295822451283843,\n\ \ \"acc_norm\": 0.3622641509433962,\n \"acc_norm_stderr\": 0.0295822451283843\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3819444444444444,\n\ \ \"acc_stderr\": 0.040629907841466674,\n \"acc_norm\": 0.3819444444444444,\n\ \ \"acc_norm_stderr\": 0.040629907841466674\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ 
: {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n\ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3236994219653179,\n\ \ \"acc_stderr\": 0.035676037996391685,\n \"acc_norm\": 0.3236994219653179,\n\ \ \"acc_norm_stderr\": 0.035676037996391685\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171451,\n\ \ \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171451\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"\ acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.37446808510638296,\n \"acc_stderr\": 0.03163910665367291,\n\ \ \"acc_norm\": 0.37446808510638296,\n \"acc_norm_stderr\": 0.03163910665367291\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.04142439719489362,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.04142439719489362\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n\ \ \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2619047619047619,\n \"acc_stderr\": 0.022644212615525214,\n \"\ acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.022644212615525214\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n\ \ \"acc_stderr\": 0.038932596106046734,\n \"acc_norm\": 0.25396825396825395,\n\ \ \"acc_norm_stderr\": 0.038932596106046734\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.33548387096774196,\n\ \ \"acc_stderr\": 0.02686020644472435,\n \"acc_norm\": 0.33548387096774196,\n\ \ \"acc_norm_stderr\": 0.02686020644472435\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.031785297106427496,\n\ \ \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.031785297106427496\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\"\ : 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.43636363636363634,\n \"acc_stderr\": 0.03872592983524754,\n\ \ \"acc_norm\": 0.43636363636363634,\n \"acc_norm_stderr\": 0.03872592983524754\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.3333333333333333,\n \"acc_stderr\": 0.03358618145732522,\n \"\ acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.03358618145732522\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.45077720207253885,\n \"acc_stderr\": 
0.03590910952235525,\n\ \ \"acc_norm\": 0.45077720207253885,\n \"acc_norm_stderr\": 0.03590910952235525\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.34102564102564104,\n \"acc_stderr\": 0.024035489676335065,\n\ \ \"acc_norm\": 0.34102564102564104,\n \"acc_norm_stderr\": 0.024035489676335065\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712173,\n \ \ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712173\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.3277310924369748,\n \"acc_stderr\": 0.03048991141767323,\n \ \ \"acc_norm\": 0.3277310924369748,\n \"acc_norm_stderr\": 0.03048991141767323\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.26490066225165565,\n \"acc_stderr\": 0.036030385453603854,\n \"\ acc_norm\": 0.26490066225165565,\n \"acc_norm_stderr\": 0.036030385453603854\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.48440366972477067,\n \"acc_stderr\": 0.02142689153920805,\n \"\ acc_norm\": 0.48440366972477067,\n \"acc_norm_stderr\": 0.02142689153920805\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.30092592592592593,\n \"acc_stderr\": 0.03128039084329881,\n \"\ acc_norm\": 0.30092592592592593,\n \"acc_norm_stderr\": 0.03128039084329881\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.35784313725490197,\n \"acc_stderr\": 0.03364487286088299,\n \"\ acc_norm\": 0.35784313725490197,\n \"acc_norm_stderr\": 0.03364487286088299\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.43037974683544306,\n \"acc_stderr\": 0.03223017195937598,\n \ \ \"acc_norm\": 0.43037974683544306,\n \"acc_norm_stderr\": 0.03223017195937598\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3991031390134529,\n\ \ \"acc_stderr\": 0.032867453125679603,\n \"acc_norm\": 0.3991031390134529,\n\ \ \"acc_norm_stderr\": 0.032867453125679603\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.3435114503816794,\n \"acc_stderr\": 0.041649760719448786,\n\ \ \"acc_norm\": 0.3435114503816794,\n \"acc_norm_stderr\": 0.041649760719448786\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5206611570247934,\n \"acc_stderr\": 0.04560456086387235,\n \"\ acc_norm\": 0.5206611570247934,\n \"acc_norm_stderr\": 0.04560456086387235\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4166666666666667,\n\ \ \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.4166666666666667,\n\ \ \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.4294478527607362,\n \"acc_stderr\": 0.038890666191127216,\n\ \ \"acc_norm\": 0.4294478527607362,\n \"acc_norm_stderr\": 0.038890666191127216\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n\ \ \"acc_stderr\": 0.042466243366976256,\n \"acc_norm\": 0.2767857142857143,\n\ \ \"acc_norm_stderr\": 0.042466243366976256\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.33980582524271846,\n \"acc_stderr\": 0.04689765937278133,\n\ \ \"acc_norm\": 0.33980582524271846,\n \"acc_norm_stderr\": 0.04689765937278133\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.47863247863247865,\n\ \ \"acc_stderr\": 0.03272616447634954,\n \"acc_norm\": 0.47863247863247865,\n\ \ \"acc_norm_stderr\": 
0.03272616447634954\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.4278416347381865,\n\ \ \"acc_stderr\": 0.01769278792780373,\n \"acc_norm\": 0.4278416347381865,\n\ \ \"acc_norm_stderr\": 0.01769278792780373\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.3901734104046243,\n \"acc_stderr\": 0.026261677607806653,\n\ \ \"acc_norm\": 0.3901734104046243,\n \"acc_norm_stderr\": 0.026261677607806653\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.3954248366013072,\n \"acc_stderr\": 0.027996723180631445,\n\ \ \"acc_norm\": 0.3954248366013072,\n \"acc_norm_stderr\": 0.027996723180631445\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3987138263665595,\n\ \ \"acc_stderr\": 0.0278093225857745,\n \"acc_norm\": 0.3987138263665595,\n\ \ \"acc_norm_stderr\": 0.0278093225857745\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.3487654320987654,\n \"acc_stderr\": 0.02651759772446501,\n\ \ \"acc_norm\": 0.3487654320987654,\n \"acc_norm_stderr\": 0.02651759772446501\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2730496453900709,\n \"acc_stderr\": 0.026577860943307857,\n \ \ \"acc_norm\": 0.2730496453900709,\n \"acc_norm_stderr\": 0.026577860943307857\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.29595827900912647,\n\ \ \"acc_stderr\": 0.011658518525277054,\n \"acc_norm\": 0.29595827900912647,\n\ \ \"acc_norm_stderr\": 0.011658518525277054\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.030161911930767102,\n\ \ \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.030161911930767102\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.35294117647058826,\n \"acc_stderr\": 0.01933314202079706,\n \ \ \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.01933314202079706\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.41818181818181815,\n\ \ \"acc_stderr\": 0.0472457740573157,\n \"acc_norm\": 0.41818181818181815,\n\ \ \"acc_norm_stderr\": 0.0472457740573157\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.34285714285714286,\n \"acc_stderr\": 0.030387262919547728,\n\ \ \"acc_norm\": 0.34285714285714286,\n \"acc_norm_stderr\": 0.030387262919547728\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.472636815920398,\n\ \ \"acc_stderr\": 0.03530235517334682,\n \"acc_norm\": 0.472636815920398,\n\ \ \"acc_norm_stderr\": 0.03530235517334682\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \ \ \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3313253012048193,\n\ \ \"acc_stderr\": 0.03664314777288085,\n \"acc_norm\": 0.3313253012048193,\n\ \ \"acc_norm_stderr\": 0.03664314777288085\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.4853801169590643,\n \"acc_stderr\": 0.038331852752130205,\n\ \ \"acc_norm\": 0.4853801169590643,\n \"acc_norm_stderr\": 
0.038331852752130205\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.014509045171487291,\n \"mc2\": 0.3433554241758255,\n\ \ \"mc2_stderr\": 0.01319092242364727\n }\n}\n```" repo_url: https://huggingface.co/AGI-inc/lora_moe_7b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|arc:challenge|25_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hellaswag|10_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:46:25.370436.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:46:25.370436.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:46:25.370436.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:46:25.370436.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:46:25.370436.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:46:25.370436.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T11_46_25.370436 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:46:25.370436.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:46:25.370436.parquet' - config_name: results data_files: - split: 2023_07_24T11_46_25.370436 path: - results_2023-07-24T11:46:25.370436.parquet - split: latest path: - results_2023-07-24T11:46:25.370436.parquet --- # Dataset Card for Evaluation run of AGI-inc/lora_moe_7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/AGI-inc/lora_moe_7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [AGI-inc/lora_moe_7b](https://huggingface.co/AGI-inc/lora_moe_7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AGI-inc__lora_moe_7b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-24T11:46:25.370436](https://huggingface.co/datasets/open-llm-leaderboard/details_AGI-inc__lora_moe_7b/blob/main/results_2023-07-24T11%3A46%3A25.370436.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.3624349655819883, "acc_stderr": 0.03457932037185986, "acc_norm": 0.36641755034742307, "acc_norm_stderr": 0.03456622803809125, "mc1": 0.22031823745410037, "mc1_stderr": 0.014509045171487291, "mc2": 0.3433554241758255, "mc2_stderr": 0.01319092242364727 }, "harness|arc:challenge|25": { "acc": 0.47696245733788395, "acc_stderr": 0.014595873205358267, "acc_norm": 0.5093856655290102, "acc_norm_stderr": 0.014608816322065 }, "harness|hellaswag|10": { "acc": 0.5754829715196176, "acc_stderr": 0.004932593348813628, "acc_norm": 0.7780322644891456, "acc_norm_stderr": 0.004147202539759587 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.34210526315789475, "acc_stderr": 0.03860731599316092, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.03860731599316092 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3622641509433962, "acc_stderr": 0.0295822451283843, "acc_norm": 0.3622641509433962, "acc_norm_stderr": 0.0295822451283843 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3819444444444444, "acc_stderr": 0.040629907841466674, "acc_norm": 0.3819444444444444, "acc_norm_stderr": 0.040629907841466674 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3236994219653179, "acc_stderr": 0.035676037996391685, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.035676037996391685 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171451, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171451 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.37446808510638296, "acc_stderr": 0.03163910665367291, "acc_norm": 0.37446808510638296, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2619047619047619, "acc_stderr": 0.022644212615525214, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.022644212615525214 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.038932596106046734, "acc_norm": 
0.25396825396825395, "acc_norm_stderr": 0.038932596106046734 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.33548387096774196, "acc_stderr": 0.02686020644472435, "acc_norm": 0.33548387096774196, "acc_norm_stderr": 0.02686020644472435 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2857142857142857, "acc_stderr": 0.031785297106427496, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.031785297106427496 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.43636363636363634, "acc_stderr": 0.03872592983524754, "acc_norm": 0.43636363636363634, "acc_norm_stderr": 0.03872592983524754 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03358618145732522, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03358618145732522 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.45077720207253885, "acc_stderr": 0.03590910952235525, "acc_norm": 0.45077720207253885, "acc_norm_stderr": 0.03590910952235525 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.34102564102564104, "acc_stderr": 0.024035489676335065, "acc_norm": 0.34102564102564104, "acc_norm_stderr": 0.024035489676335065 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712173, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712173 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3277310924369748, "acc_stderr": 0.03048991141767323, "acc_norm": 0.3277310924369748, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.036030385453603854, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.036030385453603854 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.48440366972477067, "acc_stderr": 0.02142689153920805, "acc_norm": 0.48440366972477067, "acc_norm_stderr": 0.02142689153920805 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.30092592592592593, "acc_stderr": 0.03128039084329881, "acc_norm": 0.30092592592592593, "acc_norm_stderr": 0.03128039084329881 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.35784313725490197, "acc_stderr": 0.03364487286088299, "acc_norm": 0.35784313725490197, "acc_norm_stderr": 0.03364487286088299 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.43037974683544306, "acc_stderr": 0.03223017195937598, "acc_norm": 0.43037974683544306, "acc_norm_stderr": 0.03223017195937598 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3991031390134529, "acc_stderr": 0.032867453125679603, "acc_norm": 0.3991031390134529, "acc_norm_stderr": 0.032867453125679603 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.3435114503816794, "acc_stderr": 0.041649760719448786, "acc_norm": 0.3435114503816794, "acc_norm_stderr": 0.041649760719448786 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5206611570247934, "acc_stderr": 0.04560456086387235, "acc_norm": 0.5206611570247934, "acc_norm_stderr": 0.04560456086387235 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 
0.4166666666666667, "acc_norm_stderr": 0.04766075165356461 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.038890666191127216, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.038890666191127216 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.042466243366976256, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.042466243366976256 }, "harness|hendrycksTest-management|5": { "acc": 0.33980582524271846, "acc_stderr": 0.04689765937278133, "acc_norm": 0.33980582524271846, "acc_norm_stderr": 0.04689765937278133 }, "harness|hendrycksTest-marketing|5": { "acc": 0.47863247863247865, "acc_stderr": 0.03272616447634954, "acc_norm": 0.47863247863247865, "acc_norm_stderr": 0.03272616447634954 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.4278416347381865, "acc_stderr": 0.01769278792780373, "acc_norm": 0.4278416347381865, "acc_norm_stderr": 0.01769278792780373 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.3901734104046243, "acc_stderr": 0.026261677607806653, "acc_norm": 0.3901734104046243, "acc_norm_stderr": 0.026261677607806653 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.3954248366013072, "acc_stderr": 0.027996723180631445, "acc_norm": 0.3954248366013072, "acc_norm_stderr": 0.027996723180631445 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3987138263665595, "acc_stderr": 0.0278093225857745, "acc_norm": 0.3987138263665595, "acc_norm_stderr": 0.0278093225857745 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.3487654320987654, "acc_stderr": 0.02651759772446501, "acc_norm": 0.3487654320987654, "acc_norm_stderr": 0.02651759772446501 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2730496453900709, "acc_stderr": 0.026577860943307857, "acc_norm": 0.2730496453900709, "acc_norm_stderr": 0.026577860943307857 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.29595827900912647, "acc_stderr": 0.011658518525277054, "acc_norm": 0.29595827900912647, "acc_norm_stderr": 0.011658518525277054 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4411764705882353, "acc_stderr": 0.030161911930767102, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.030161911930767102 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.35294117647058826, "acc_stderr": 0.01933314202079706, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.01933314202079706 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.0472457740573157, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.0472457740573157 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.34285714285714286, "acc_stderr": 0.030387262919547728, "acc_norm": 0.34285714285714286, "acc_norm_stderr": 0.030387262919547728 }, "harness|hendrycksTest-sociology|5": { "acc": 0.472636815920398, "acc_stderr": 0.03530235517334682, "acc_norm": 0.472636815920398, "acc_norm_stderr": 0.03530235517334682 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-virology|5": { "acc": 0.3313253012048193, "acc_stderr": 
0.03664314777288085, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288085 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.4853801169590643, "acc_stderr": 0.038331852752130205, "acc_norm": 0.4853801169590643, "acc_norm_stderr": 0.038331852752130205 }, "harness|truthfulqa:mc|0": { "mc1": 0.22031823745410037, "mc1_stderr": 0.014509045171487291, "mc2": 0.3433554241758255, "mc2_stderr": 0.01319092242364727 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_AGI-inc__lora_moe_7b_baseline
open-llm-leaderboard
2023-08-27T12:38:06Z
201
0
[ "region:us" ]
null
2023-08-18T11:54:24Z
--- pretty_name: Evaluation run of AGI-inc/lora_moe_7b_baseline dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [AGI-inc/lora_moe_7b_baseline](https://huggingface.co/AGI-inc/lora_moe_7b_baseline)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AGI-inc__lora_moe_7b_baseline\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-24T11:38:46.147581](https://huggingface.co/datasets/open-llm-leaderboard/details_AGI-inc__lora_moe_7b_baseline/blob/main/results_2023-07-24T11%3A38%3A46.147581.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3624349655819883,\n\ \ \"acc_stderr\": 0.03457932037185986,\n \"acc_norm\": 0.36641755034742307,\n\ \ \"acc_norm_stderr\": 0.03456622803809125,\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.014509045171487291,\n \"mc2\": 0.3433554241758255,\n\ \ \"mc2_stderr\": 0.01319092242364727\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.47696245733788395,\n \"acc_stderr\": 0.014595873205358267,\n\ \ \"acc_norm\": 0.5093856655290102,\n \"acc_norm_stderr\": 0.014608816322065\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5754829715196176,\n\ \ \"acc_stderr\": 0.004932593348813628,\n \"acc_norm\": 0.7780322644891456,\n\ \ \"acc_norm_stderr\": 0.004147202539759587\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3851851851851852,\n\ \ \"acc_stderr\": 0.042039210401562783,\n \"acc_norm\": 0.3851851851851852,\n\ \ \"acc_norm_stderr\": 0.042039210401562783\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.03860731599316092,\n\ \ \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.03860731599316092\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.41,\n\ \ \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \ \ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.3622641509433962,\n \"acc_stderr\": 0.0295822451283843,\n\ \ \"acc_norm\": 0.3622641509433962,\n \"acc_norm_stderr\": 0.0295822451283843\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3819444444444444,\n\ \ \"acc_stderr\": 0.040629907841466674,\n \"acc_norm\": 0.3819444444444444,\n\ \ \"acc_norm_stderr\": 0.040629907841466674\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n\ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3236994219653179,\n\ \ \"acc_stderr\": 0.035676037996391685,\n \"acc_norm\": 0.3236994219653179,\n\ \ \"acc_norm_stderr\": 0.035676037996391685\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171451,\n\ \ \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171451\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"\ acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.37446808510638296,\n \"acc_stderr\": 0.03163910665367291,\n\ \ \"acc_norm\": 0.37446808510638296,\n \"acc_norm_stderr\": 0.03163910665367291\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.04142439719489362,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.04142439719489362\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n\ \ \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2619047619047619,\n \"acc_stderr\": 0.022644212615525214,\n \"\ acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.022644212615525214\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n\ \ \"acc_stderr\": 0.038932596106046734,\n \"acc_norm\": 0.25396825396825395,\n\ \ \"acc_norm_stderr\": 0.038932596106046734\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.33548387096774196,\n\ \ \"acc_stderr\": 0.02686020644472435,\n \"acc_norm\": 0.33548387096774196,\n\ \ \"acc_norm_stderr\": 0.02686020644472435\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.031785297106427496,\n\ \ \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.031785297106427496\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\"\ : 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.43636363636363634,\n \"acc_stderr\": 0.03872592983524754,\n\ \ \"acc_norm\": 0.43636363636363634,\n \"acc_norm_stderr\": 0.03872592983524754\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.3333333333333333,\n \"acc_stderr\": 0.03358618145732522,\n \"\ acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.03358618145732522\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 
0.45077720207253885,\n \"acc_stderr\": 0.03590910952235525,\n\ \ \"acc_norm\": 0.45077720207253885,\n \"acc_norm_stderr\": 0.03590910952235525\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.34102564102564104,\n \"acc_stderr\": 0.024035489676335065,\n\ \ \"acc_norm\": 0.34102564102564104,\n \"acc_norm_stderr\": 0.024035489676335065\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712173,\n \ \ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712173\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.3277310924369748,\n \"acc_stderr\": 0.03048991141767323,\n \ \ \"acc_norm\": 0.3277310924369748,\n \"acc_norm_stderr\": 0.03048991141767323\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.26490066225165565,\n \"acc_stderr\": 0.036030385453603854,\n \"\ acc_norm\": 0.26490066225165565,\n \"acc_norm_stderr\": 0.036030385453603854\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.48440366972477067,\n \"acc_stderr\": 0.02142689153920805,\n \"\ acc_norm\": 0.48440366972477067,\n \"acc_norm_stderr\": 0.02142689153920805\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.30092592592592593,\n \"acc_stderr\": 0.03128039084329881,\n \"\ acc_norm\": 0.30092592592592593,\n \"acc_norm_stderr\": 0.03128039084329881\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.35784313725490197,\n \"acc_stderr\": 0.03364487286088299,\n \"\ acc_norm\": 0.35784313725490197,\n \"acc_norm_stderr\": 0.03364487286088299\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.43037974683544306,\n \"acc_stderr\": 0.03223017195937598,\n \ \ \"acc_norm\": 0.43037974683544306,\n \"acc_norm_stderr\": 0.03223017195937598\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3991031390134529,\n\ \ \"acc_stderr\": 0.032867453125679603,\n \"acc_norm\": 0.3991031390134529,\n\ \ \"acc_norm_stderr\": 0.032867453125679603\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.3435114503816794,\n \"acc_stderr\": 0.041649760719448786,\n\ \ \"acc_norm\": 0.3435114503816794,\n \"acc_norm_stderr\": 0.041649760719448786\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5206611570247934,\n \"acc_stderr\": 0.04560456086387235,\n \"\ acc_norm\": 0.5206611570247934,\n \"acc_norm_stderr\": 0.04560456086387235\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4166666666666667,\n\ \ \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.4166666666666667,\n\ \ \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.4294478527607362,\n \"acc_stderr\": 0.038890666191127216,\n\ \ \"acc_norm\": 0.4294478527607362,\n \"acc_norm_stderr\": 0.038890666191127216\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n\ \ \"acc_stderr\": 0.042466243366976256,\n \"acc_norm\": 0.2767857142857143,\n\ \ \"acc_norm_stderr\": 0.042466243366976256\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.33980582524271846,\n \"acc_stderr\": 0.04689765937278133,\n\ \ \"acc_norm\": 0.33980582524271846,\n \"acc_norm_stderr\": 0.04689765937278133\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.47863247863247865,\n\ \ \"acc_stderr\": 0.03272616447634954,\n \"acc_norm\": 
0.47863247863247865,\n\ \ \"acc_norm_stderr\": 0.03272616447634954\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.4278416347381865,\n\ \ \"acc_stderr\": 0.01769278792780373,\n \"acc_norm\": 0.4278416347381865,\n\ \ \"acc_norm_stderr\": 0.01769278792780373\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.3901734104046243,\n \"acc_stderr\": 0.026261677607806653,\n\ \ \"acc_norm\": 0.3901734104046243,\n \"acc_norm_stderr\": 0.026261677607806653\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.3954248366013072,\n \"acc_stderr\": 0.027996723180631445,\n\ \ \"acc_norm\": 0.3954248366013072,\n \"acc_norm_stderr\": 0.027996723180631445\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3987138263665595,\n\ \ \"acc_stderr\": 0.0278093225857745,\n \"acc_norm\": 0.3987138263665595,\n\ \ \"acc_norm_stderr\": 0.0278093225857745\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.3487654320987654,\n \"acc_stderr\": 0.02651759772446501,\n\ \ \"acc_norm\": 0.3487654320987654,\n \"acc_norm_stderr\": 0.02651759772446501\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2730496453900709,\n \"acc_stderr\": 0.026577860943307857,\n \ \ \"acc_norm\": 0.2730496453900709,\n \"acc_norm_stderr\": 0.026577860943307857\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.29595827900912647,\n\ \ \"acc_stderr\": 0.011658518525277054,\n \"acc_norm\": 0.29595827900912647,\n\ \ \"acc_norm_stderr\": 0.011658518525277054\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.030161911930767102,\n\ \ \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.030161911930767102\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.35294117647058826,\n \"acc_stderr\": 0.01933314202079706,\n \ \ \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.01933314202079706\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.41818181818181815,\n\ \ \"acc_stderr\": 0.0472457740573157,\n \"acc_norm\": 0.41818181818181815,\n\ \ \"acc_norm_stderr\": 0.0472457740573157\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.34285714285714286,\n \"acc_stderr\": 0.030387262919547728,\n\ \ \"acc_norm\": 0.34285714285714286,\n \"acc_norm_stderr\": 0.030387262919547728\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.472636815920398,\n\ \ \"acc_stderr\": 0.03530235517334682,\n \"acc_norm\": 0.472636815920398,\n\ \ \"acc_norm_stderr\": 0.03530235517334682\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \ \ \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3313253012048193,\n\ \ \"acc_stderr\": 0.03664314777288085,\n \"acc_norm\": 0.3313253012048193,\n\ \ \"acc_norm_stderr\": 0.03664314777288085\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.4853801169590643,\n \"acc_stderr\": 0.038331852752130205,\n\ \ \"acc_norm\": 
0.4853801169590643,\n \"acc_norm_stderr\": 0.038331852752130205\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.014509045171487291,\n \"mc2\": 0.3433554241758255,\n\ \ \"mc2_stderr\": 0.01319092242364727\n }\n}\n```" repo_url: https://huggingface.co/AGI-inc/lora_moe_7b_baseline leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|arc:challenge|25_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hellaswag|10_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:46.147581.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:46.147581.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:46.147581.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:46.147581.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:46.147581.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:46.147581.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T11_38_46.147581 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:38:46.147581.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:38:46.147581.parquet' - config_name: results data_files: - split: 2023_07_24T11_38_46.147581 path: - results_2023-07-24T11:38:46.147581.parquet - split: latest path: - results_2023-07-24T11:38:46.147581.parquet --- # Dataset Card for Evaluation run of AGI-inc/lora_moe_7b_baseline ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/AGI-inc/lora_moe_7b_baseline - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [AGI-inc/lora_moe_7b_baseline](https://huggingface.co/AGI-inc/lora_moe_7b_baseline) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AGI-inc__lora_moe_7b_baseline", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-24T11:38:46.147581](https://huggingface.co/datasets/open-llm-leaderboard/details_AGI-inc__lora_moe_7b_baseline/blob/main/results_2023-07-24T11%3A38%3A46.147581.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.3624349655819883, "acc_stderr": 0.03457932037185986, "acc_norm": 0.36641755034742307, "acc_norm_stderr": 0.03456622803809125, "mc1": 0.22031823745410037, "mc1_stderr": 0.014509045171487291, "mc2": 0.3433554241758255, "mc2_stderr": 0.01319092242364727 }, "harness|arc:challenge|25": { "acc": 0.47696245733788395, "acc_stderr": 0.014595873205358267, "acc_norm": 0.5093856655290102, "acc_norm_stderr": 0.014608816322065 }, "harness|hellaswag|10": { "acc": 0.5754829715196176, "acc_stderr": 0.004932593348813628, "acc_norm": 0.7780322644891456, "acc_norm_stderr": 0.004147202539759587 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.34210526315789475, "acc_stderr": 0.03860731599316092, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.03860731599316092 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3622641509433962, "acc_stderr": 0.0295822451283843, "acc_norm": 0.3622641509433962, "acc_norm_stderr": 0.0295822451283843 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3819444444444444, "acc_stderr": 0.040629907841466674, "acc_norm": 0.3819444444444444, "acc_norm_stderr": 0.040629907841466674 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3236994219653179, "acc_stderr": 0.035676037996391685, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.035676037996391685 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171451, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171451 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.37446808510638296, "acc_stderr": 0.03163910665367291, "acc_norm": 0.37446808510638296, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, 
"acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2619047619047619, "acc_stderr": 0.022644212615525214, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.022644212615525214 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.038932596106046734, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.038932596106046734 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.33548387096774196, "acc_stderr": 0.02686020644472435, "acc_norm": 0.33548387096774196, "acc_norm_stderr": 0.02686020644472435 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2857142857142857, "acc_stderr": 0.031785297106427496, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.031785297106427496 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.43636363636363634, "acc_stderr": 0.03872592983524754, "acc_norm": 0.43636363636363634, "acc_norm_stderr": 0.03872592983524754 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03358618145732522, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03358618145732522 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.45077720207253885, "acc_stderr": 0.03590910952235525, "acc_norm": 0.45077720207253885, "acc_norm_stderr": 0.03590910952235525 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.34102564102564104, "acc_stderr": 0.024035489676335065, "acc_norm": 0.34102564102564104, "acc_norm_stderr": 0.024035489676335065 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712173, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712173 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3277310924369748, "acc_stderr": 0.03048991141767323, "acc_norm": 0.3277310924369748, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.036030385453603854, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.036030385453603854 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.48440366972477067, "acc_stderr": 0.02142689153920805, "acc_norm": 0.48440366972477067, "acc_norm_stderr": 0.02142689153920805 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.30092592592592593, "acc_stderr": 0.03128039084329881, "acc_norm": 0.30092592592592593, "acc_norm_stderr": 0.03128039084329881 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.35784313725490197, "acc_stderr": 0.03364487286088299, "acc_norm": 0.35784313725490197, "acc_norm_stderr": 0.03364487286088299 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.43037974683544306, "acc_stderr": 0.03223017195937598, "acc_norm": 0.43037974683544306, "acc_norm_stderr": 0.03223017195937598 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3991031390134529, "acc_stderr": 0.032867453125679603, 
"acc_norm": 0.3991031390134529, "acc_norm_stderr": 0.032867453125679603 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.3435114503816794, "acc_stderr": 0.041649760719448786, "acc_norm": 0.3435114503816794, "acc_norm_stderr": 0.041649760719448786 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5206611570247934, "acc_stderr": 0.04560456086387235, "acc_norm": 0.5206611570247934, "acc_norm_stderr": 0.04560456086387235 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.04766075165356461 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.038890666191127216, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.038890666191127216 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.042466243366976256, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.042466243366976256 }, "harness|hendrycksTest-management|5": { "acc": 0.33980582524271846, "acc_stderr": 0.04689765937278133, "acc_norm": 0.33980582524271846, "acc_norm_stderr": 0.04689765937278133 }, "harness|hendrycksTest-marketing|5": { "acc": 0.47863247863247865, "acc_stderr": 0.03272616447634954, "acc_norm": 0.47863247863247865, "acc_norm_stderr": 0.03272616447634954 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.4278416347381865, "acc_stderr": 0.01769278792780373, "acc_norm": 0.4278416347381865, "acc_norm_stderr": 0.01769278792780373 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.3901734104046243, "acc_stderr": 0.026261677607806653, "acc_norm": 0.3901734104046243, "acc_norm_stderr": 0.026261677607806653 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.3954248366013072, "acc_stderr": 0.027996723180631445, "acc_norm": 0.3954248366013072, "acc_norm_stderr": 0.027996723180631445 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3987138263665595, "acc_stderr": 0.0278093225857745, "acc_norm": 0.3987138263665595, "acc_norm_stderr": 0.0278093225857745 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.3487654320987654, "acc_stderr": 0.02651759772446501, "acc_norm": 0.3487654320987654, "acc_norm_stderr": 0.02651759772446501 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2730496453900709, "acc_stderr": 0.026577860943307857, "acc_norm": 0.2730496453900709, "acc_norm_stderr": 0.026577860943307857 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.29595827900912647, "acc_stderr": 0.011658518525277054, "acc_norm": 0.29595827900912647, "acc_norm_stderr": 0.011658518525277054 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4411764705882353, "acc_stderr": 0.030161911930767102, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.030161911930767102 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.35294117647058826, "acc_stderr": 0.01933314202079706, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.01933314202079706 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.0472457740573157, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.0472457740573157 }, "harness|hendrycksTest-security_studies|5": { 
"acc": 0.34285714285714286, "acc_stderr": 0.030387262919547728, "acc_norm": 0.34285714285714286, "acc_norm_stderr": 0.030387262919547728 }, "harness|hendrycksTest-sociology|5": { "acc": 0.472636815920398, "acc_stderr": 0.03530235517334682, "acc_norm": 0.472636815920398, "acc_norm_stderr": 0.03530235517334682 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.03664314777288085, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288085 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.4853801169590643, "acc_stderr": 0.038331852752130205, "acc_norm": 0.4853801169590643, "acc_norm_stderr": 0.038331852752130205 }, "harness|truthfulqa:mc|0": { "mc1": 0.22031823745410037, "mc1_stderr": 0.014509045171487291, "mc2": 0.3433554241758255, "mc2_stderr": 0.01319092242364727 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7306333184242249, -0.8697482347488403, 0.2574756443500519, 0.20400816202163696, -0.11112868040800095, -0.08402299135923386, 0.04428825154900551, -0.2348218709230423, 0.5882548689842224, -0.06240455061197281, -0.48231303691864014, -0.6714172959327698, -0.4379767179489136, 0.2232760339975357, -0.037561506032943726, 0.8066118955612183, -0.20232832431793213, -0.14182139933109283, 0.07674015313386917, -0.07353873550891876, -0.2569747269153595, -0.33677834272384644, -0.5086397528648376, -0.35172948241233826, 0.16188696026802063, 0.44049760699272156, 0.4181005358695984, 0.8277707099914551, 0.6760526299476624, 0.30409038066864014, -0.31630831956863403, -0.038112785667181015, -0.1736561506986618, -0.32258152961730957, 0.3901735842227936, -0.35552069544792175, -0.8655087947845459, 0.3292798399925232, 0.7635514736175537, 0.6390932202339172, -0.07365595549345016, 0.31369248032569885, 0.04400758072733879, 0.5872746706008911, -0.40113258361816406, 0.04124710336327553, -0.2888824939727783, 0.219206765294075, -0.17039044201374054, -0.26899829506874084, -0.3025578260421753, -0.22829723358154297, -0.1574980914592743, -0.8679884672164917, 0.2683631181716919, 0.2650282680988312, 1.613661527633667, -0.1325056254863739, -0.23998980224132538, 0.0919547826051712, -0.11360689997673035, 0.9960642457008362, -0.8810005187988281, 0.322232723236084, 0.7902454137802124, 0.10924684256315231, -0.1532103419303894, -0.5850290656089783, -0.6137000918388367, 0.08378137648105621, -0.3847092092037201, 0.3657008111476898, -0.05903661996126175, -0.2165965437889099, 0.38262054324150085, 0.7073256373405457, -0.6173343658447266, 0.16645371913909912, -0.6722097396850586, -0.13003791868686676, 1.0597583055496216, 0.3358185589313507, 0.06386549025774002, -0.35835662484169006, -0.6984857320785522, -0.6468126177787781, -0.42647457122802734, 0.3036845326423645, 0.4616125822067261, 0.3586118817329407, -0.404893696308136, 0.7006102204322815, -0.4338056743144989, 0.557888388633728, 0.4519777297973633, 0.04849320650100708, 0.9160626530647278, -0.6846807599067688, -0.5383802056312561, -0.06792603433132172, 1.1230790615081787, 0.5579217672348022, 0.046490173786878586, 0.24373759329319, 0.04241614043712616, -0.055995360016822815, 0.015220425091683865, -0.8654088377952576, -0.2492690086364746, 0.1563401073217392, -0.3717745244503021, -0.49633583426475525, 0.355620801448822, -0.8625876307487488, 0.16992142796516418, -0.038365013897418976, 0.405720055103302, -0.5283539295196533, -0.08176935464143753, 0.20926214754581451, -0.3953118920326233, 0.8416022062301636, -0.2058669626712799, -0.7986197471618652, 0.3760981559753418, 0.5013896822929382, 0.7552287578582764, -0.08732028305530548, -0.4365136921405792, -0.16445310413837433, -0.0852479413151741, -0.25323984026908875, 0.5323264002799988, -0.290964812040329, -0.45224812626838684, -0.291452556848526, 0.2921231687068939, -0.27810195088386536, -0.3393435478210449, 0.7456095814704895, -0.1750865876674652, 0.21977569162845612, -0.4354269504547119, -0.626346230506897, 0.14642475545406342, 0.39951640367507935, -0.40618640184402466, 1.2806349992752075, 0.2479907125234604, -0.8296352028846741, 0.4166158437728882, -0.6172491908073425, -0.17706842720508575, -0.04486333578824997, -0.048384878784418106, -0.783936619758606, -0.2698149085044861, 0.20991423726081848, 0.4152948260307312, -0.11272455751895905, -0.1211373433470726, -0.38867539167404175, -0.3547389507293701, 0.3287286162376404, -0.16776952147483826, 1.1999210119247437, -0.0026054654736071825, -0.7480143308639526, -0.0954681858420372, 
-1.2529892921447754, 0.34009602665901184, 0.20157337188720703, -0.3856911361217499, -0.20488080382347107, -0.4706149399280548, -0.05010505020618439, 0.1533234715461731, 0.28768688440322876, -0.787509560585022, 0.3001348078250885, -0.3589276075363159, 0.207930788397789, 1.2640002965927124, 0.015139250084757805, 0.16002710163593292, -0.5285736918449402, 0.49977990984916687, 0.21038304269313812, 0.21410499513149261, 0.3632672131061554, -0.5817298293113708, -0.8310775756835938, -0.4979134500026703, -0.035353634506464005, 0.6253976821899414, -0.20903931558132172, 1.1505182981491089, 0.039222266525030136, -0.888227105140686, -0.4506680965423584, -0.13526949286460876, 0.5018454790115356, 0.8090036511421204, 0.5926442742347717, -0.03109295852482319, -0.5943448543548584, -1.1131104230880737, -0.2810996174812317, -0.17413330078125, 0.15677516162395477, 0.216899111866951, 1.0444488525390625, -0.3001018166542053, 0.6067063212394714, -1.03596031665802, -0.2161819040775299, 0.17850945889949799, -0.030683454126119614, 0.8227794170379639, 0.7459133863449097, 0.5947672724723816, -0.6770471334457397, -0.5575340390205383, 0.2042006403207779, -0.8916715979576111, -0.11006126552820206, 0.14318428933620453, -0.3085184097290039, 0.13724182546138763, 0.16369439661502838, -0.6792470216751099, 0.5372454524040222, 0.217713862657547, -1.107326865196228, 1.0240840911865234, -0.34155920147895813, 0.5854211449623108, -1.0337163209915161, 0.2245156615972519, -0.03401634469628334, 0.036737773567438126, -0.49759534001350403, 0.05672518163919449, 0.08264902979135513, 0.45112344622612, -0.48375049233436584, 0.8174278736114502, -0.677733302116394, -0.06126638129353523, 0.46684980392456055, 0.11837227642536163, -0.11316149681806564, 0.3547738492488861, -0.25478410720825195, 0.8202041983604431, 0.7756257057189941, -0.49356937408447266, 0.5077835917472839, 0.42706385254859924, -0.2174970805644989, 0.7537034153938293, -0.49068740010261536, -0.32363277673721313, 0.317618191242218, -0.056863702833652496, -0.7953251600265503, -0.5018517971038818, 0.06553896516561508, -0.6012857556343079, -0.10033097863197327, 0.3849683105945587, -0.2501051127910614, -0.8091577887535095, -0.933489978313446, 0.3141843378543854, 0.6939742565155029, -0.421165406703949, -0.12557224929332733, 0.051262978464365005, 0.07810703665018082, -0.8606570959091187, -0.82586270570755, -0.5235500335693359, -0.2530386745929718, -0.6977036595344543, 0.2877218723297119, -0.27380773425102234, -0.2863856554031372, -0.05585040897130966, -0.22560705244541168, -0.32983583211898804, 0.014535768888890743, 0.14911234378814697, 0.6682326793670654, -0.40831369161605835, -0.33305901288986206, -0.26507067680358887, -0.17980779707431793, 0.2358444631099701, -0.05149255320429802, 0.37349560856819153, -0.5097748637199402, -0.4225682318210602, -0.4094741642475128, -0.005954280961304903, 0.7024657726287842, -0.08218877017498016, 0.7362388968467712, 0.4387432038784027, -0.2826598584651947, 0.018445519730448723, -0.29569435119628906, -0.22567926347255707, -0.5818267464637756, 0.2647067606449127, -0.5205230116844177, -1.0027117729187012, 0.8251640200614929, 0.5311198234558105, 0.06007095053792, 1.1744710206985474, 0.5735291242599487, -0.26832544803619385, 1.019453763961792, 0.042856909334659576, 0.2868252396583557, 0.4022367596626282, -0.7317207455635071, 0.0956415981054306, -0.9465693831443787, -0.2848452627658844, -0.5923035144805908, -0.4910590648651123, -0.698040246963501, -0.04417867958545685, 0.28355905413627625, 0.18553216755390167, -0.6931385397911072, 0.5739647746086121, 
-0.8225150108337402, 0.5897438526153564, 0.592589259147644, 0.23295727372169495, 0.16604523360729218, -0.1534651219844818, -0.3860764801502228, -0.05849987268447876, -0.46158626675605774, -0.23262812197208405, 1.1860958337783813, 0.27459073066711426, 0.7418063282966614, 0.06175018846988678, 0.8909281492233276, 0.09040701389312744, -0.05993695929646492, -0.5577523708343506, 0.6567732095718384, 0.10235005617141724, -0.7802489399909973, -0.4225270748138428, -0.5413205623626709, -1.1000633239746094, 0.39485472440719604, -0.1207321360707283, -0.8580013513565063, 0.14979709684848785, 0.0425681471824646, -0.1907850056886673, 0.4817482829093933, -0.5994132161140442, 0.8308350443840027, -0.12431222945451736, -0.489157110452652, 0.1114092767238617, -0.8162003755569458, 0.4429464638233185, 0.21077723801136017, 0.2689184546470642, 0.03655887022614479, 0.2777024209499359, 1.1407208442687988, -0.8334323167800903, 0.4265229105949402, 0.06884176284074783, 0.03823712840676308, 0.35025596618652344, -0.21974004805088043, 0.515649676322937, 0.07669363915920258, -0.021579308435320854, -0.10915220528841019, 0.2736141085624695, -0.8631687760353088, -0.04252839460968971, 0.9601919651031494, -0.9664020538330078, -0.6025550365447998, -0.8981298208236694, -0.5152589678764343, 0.06679454445838928, 0.5664987564086914, 0.42346808314323425, 0.5198860168457031, -0.010207933373749256, 0.47286704182624817, 0.8215384483337402, -0.1143125593662262, 0.6099353432655334, 0.22089628875255585, 0.09800506383180618, -0.6589382886886597, 0.8328384757041931, 0.08704955875873566, 0.38893041014671326, 0.23230397701263428, 0.38205066323280334, -0.5257900953292847, -0.16935421526432037, -0.21841222047805786, 0.5009641647338867, -0.6513904929161072, -0.2762259542942047, -0.36272308230400085, -0.4125172197818756, -0.778287947177887, -0.64901202917099, -0.31498783826828003, -0.47026559710502625, -0.49209028482437134, -0.47348839044570923, 0.5651513338088989, 0.4511684775352478, -0.3951805531978607, 0.05084548518061638, -0.4894445240497589, 0.28745725750923157, 0.32515791058540344, 0.5697358846664429, -0.4018520712852478, -0.5454002618789673, 0.04494219273328781, -0.10739962011575699, -0.5750893950462341, -0.9144058227539062, 0.34310388565063477, -0.023006588220596313, 0.5188595056533813, 0.6046605706214905, 0.07108777016401291, 0.8821902275085449, -0.21358360350131989, 1.0313260555267334, 0.3385709226131439, -0.8044912815093994, 0.747372567653656, -0.33207276463508606, 0.21257588267326355, 0.6513950228691101, 0.16397079825401306, -0.15306666493415833, -0.6784865260124207, -1.3114014863967896, -0.8432425260543823, 0.6617875099182129, 0.4070553183555603, -0.26490381360054016, 0.03707963600754738, 0.13450397551059723, -0.30047905445098877, -0.19698525965213776, -0.7090473771095276, -0.8820752501487732, -0.14575046300888062, -0.5022801756858826, 0.08912783116102219, 0.03436761349439621, -0.40253135561943054, -0.8042786121368408, 0.9668862819671631, -0.014618758112192154, 0.5972850322723389, 0.47807577252388, 0.07528533786535263, 0.05738719180226326, 0.5142776966094971, 0.9059441685676575, 0.7234848737716675, -0.4508911371231079, 0.3713422119617462, 0.4162488877773285, -1.0308321714401245, 0.4518663287162781, 0.33998018503189087, -0.07957980036735535, -0.05112196505069733, 0.46898528933525085, 0.4743020832538605, 0.01336186844855547, -0.19570878148078918, 0.608927845954895, 0.0027882938738912344, -0.5515121221542358, -0.41845959424972534, 0.11840427666902542, -0.1398295909166336, 0.00969872809946537, 0.3935677707195282, -0.16539697349071503, 
-0.0376521497964859, -0.5403885841369629, 0.4461837112903595, 0.391733855009079, -0.46678242087364197, -0.15303850173950195, 0.7273098826408386, -0.16767238080501556, -0.12205789238214493, 0.32801711559295654, -0.1610136181116104, -0.5927491784095764, 1.1219536066055298, 0.5787455439567566, 0.6798791289329529, -0.27003082633018494, -0.07905532419681549, 0.9176005721092224, 0.3487885892391205, -0.05052755028009415, 0.5149556398391724, 0.3414832651615143, -0.24229936301708221, 0.1405961811542511, -0.8677717447280884, -0.020324820652604103, 0.19912931323051453, -0.8644826412200928, 0.3424997329711914, -0.48985400795936584, -0.20559075474739075, 0.026246972382068634, 0.37272337079048157, -0.43610233068466187, 0.5342804789543152, -0.4159204661846161, 1.2101190090179443, -0.9866266250610352, 0.6748111844062805, 0.774204432964325, -0.5406090617179871, -1.0267555713653564, -0.5382287502288818, -0.03583652526140213, -0.8037124276161194, 0.5814019441604614, -0.013400270603597164, 0.1680050939321518, -0.07287292927503586, -0.7339000105857849, -0.9289299845695496, 1.4021536111831665, -0.063468798995018, -0.41421619057655334, 0.21194736659526825, -0.03354405239224434, 0.4501339793205261, 0.12576104700565338, 0.5703957676887512, 0.7386505007743835, 0.8250020146369934, -0.057913921773433685, -0.7300542593002319, 0.3049046993255615, -0.5591937899589539, -0.34717148542404175, 0.4817297160625458, -0.9204434156417847, 1.2146553993225098, 0.021082023158669472, 0.20786622166633606, -0.15963922441005707, 0.6572430729866028, 0.7755205631256104, 0.300464928150177, 0.36062338948249817, 0.9418354034423828, 0.8181461095809937, -0.5201046466827393, 1.034299373626709, -0.2161252647638321, 0.8613072037696838, 0.7203225493431091, 0.20664265751838684, 0.7934435606002808, 0.6767404675483704, -0.5235390663146973, 0.5466076135635376, 0.7745537757873535, -0.34037408232688904, 0.3816811144351959, 0.24676525592803955, -0.10068591684103012, -0.12974813580513, 0.4263370633125305, -0.8984881639480591, 0.12751629948616028, 0.07573196291923523, -0.33606797456741333, 0.08776704967021942, -0.43526628613471985, 0.2923256456851959, -0.08656329661607742, -0.08022459596395493, 0.37224382162094116, 0.028154639527201653, -0.46179279685020447, 0.9578249454498291, -0.17249718308448792, 0.7500931620597839, -0.5488249063491821, -0.06899650394916534, -0.38018321990966797, 0.6344158053398132, -0.421560674905777, -1.0462085008621216, 0.1713990420103073, 0.09015609323978424, -0.14268165826797485, -0.15757828950881958, 0.703020453453064, -0.18414075672626495, -0.7892798185348511, 0.1338571459054947, 0.051048584282398224, 0.07017751783132553, 0.5273308753967285, -0.6582015156745911, -0.3543158173561096, -0.0641380026936531, -0.5755897760391235, 0.119343601167202, 0.312281996011734, 0.266967236995697, 0.5734133720397949, 0.6385044455528259, 0.14764825999736786, 0.4075148105621338, -0.5666418075561523, 0.7969307899475098, -1.0482698678970337, -0.7559024691581726, -0.9186417460441589, 0.48959824442863464, -0.34382370114326477, -0.8518534302711487, 0.9803646802902222, 1.0430898666381836, 0.8823980093002319, -0.028733134269714355, 0.6368367075920105, -0.40619200468063354, 0.26587218046188354, -0.4083807170391083, 0.9307701587677002, -0.8524832725524902, -0.23409512639045715, -0.24491587281227112, -0.7162462472915649, -0.34514138102531433, 0.839862048625946, -0.17754529416561127, 0.04046909883618355, 1.050374984741211, 0.6129987835884094, -0.13079755008220673, 0.007133789360523224, -0.07282169908285141, 0.6126953959465027, 0.36840322613716125, 
0.9686568379402161, 0.6348731517791748, -0.7935121059417725, 0.3226959705352783, -0.46930229663848877, -0.3970893323421478, -0.3995846211910248, -0.4568259119987488, -0.8585629463195801, -0.485898494720459, -0.22401636838912964, -0.6077229976654053, -0.14072071015834808, 0.949002742767334, 0.4385835826396942, -0.9273450374603271, -0.4214119017124176, -0.12024456262588501, 0.10356301069259644, -0.5679342746734619, -0.42128485441207886, 0.7290744185447693, -0.12324419617652893, -0.5821865200996399, 0.19468936324119568, -0.15951497852802277, 0.215200737118721, 0.1348843276500702, -0.41494297981262207, -0.7491717338562012, 0.04371853545308113, 0.4347790479660034, 0.3621406853199005, -0.6983209252357483, -0.7021872997283936, 0.2881099581718445, -0.5193321108818054, 0.45038965344429016, -0.054635610431432724, -0.5296801924705505, 0.015182687900960445, 0.7080159783363342, 0.46656689047813416, 0.6350008249282837, -0.0010725384345278144, 0.07085616141557693, -0.6734957695007324, 0.18486592173576355, -0.06391939520835876, 0.32123929262161255, -0.04425542429089546, -0.31932318210601807, 0.8377255797386169, 0.6552168130874634, -0.5151516199111938, -1.0714402198791504, -0.42461106181144714, -1.4516371488571167, -0.024086598306894302, 1.1334547996520996, -0.013606474734842777, -0.500492513179779, 0.30399468541145325, -0.11826767772436142, 0.19098077714443207, -0.3157717287540436, 0.7498466372489929, 0.8096805810928345, -0.35333335399627686, 0.1562255322933197, -0.6392852663993835, 0.3295210301876068, 0.5090885758399963, -1.2268280982971191, -0.11044326424598694, 0.2591698169708252, 0.31150537729263306, 0.3955467939376831, 0.6191516518592834, -0.08897114545106888, 0.23222091794013977, 0.21056652069091797, 0.02731545828282833, -0.020680667832493782, 0.07868876308202744, -0.2202431708574295, 0.09348992258310318, -0.2925183176994324, -0.4578837752342224 ]
open-llm-leaderboard/details_hakurei__lotus-12B
open-llm-leaderboard
2023-08-27T12:38:20Z
201
0
[ "region:us" ]
null
2023-08-18T11:55:33Z
--- pretty_name: Evaluation run of hakurei/lotus-12B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [hakurei/lotus-12B](https://huggingface.co/hakurei/lotus-12B) on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_hakurei__lotus-12B\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-18T13:41:37.836572](https://huggingface.co/datasets/open-llm-leaderboard/details_hakurei__lotus-12B/blob/main/results_2023-07-18T13%3A41%3A37.836572.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2485367177317206,\n\ \ \"acc_stderr\": 0.03124226591981919,\n \"acc_norm\": 0.25130617872419225,\n\ \ \"acc_norm_stderr\": 0.03125347081115218,\n \"mc1\": 0.22643818849449204,\n\ \ \"mc1_stderr\": 0.014651337324602574,\n \"mc2\": 0.40115476804436745,\n\ \ \"mc2_stderr\": 0.014756133562988513\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.26535836177474403,\n \"acc_stderr\": 0.012902554762313962,\n\ \ \"acc_norm\": 0.30716723549488056,\n \"acc_norm_stderr\": 0.013481034054980945\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4054969129655447,\n\ \ \"acc_stderr\": 0.0048998450871831105,\n \"acc_norm\": 0.5270862378012349,\n\ \ \"acc_norm_stderr\": 0.004982454383162063\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.25925925925925924,\n\ \ \"acc_stderr\": 0.03785714465066653,\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.03785714465066653\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.19736842105263158,\n \"acc_stderr\": 0.03238981601699397,\n\ \ \"acc_norm\": 0.19736842105263158,\n \"acc_norm_stderr\": 0.03238981601699397\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.28,\n\ \ \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \ \ \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.27169811320754716,\n \"acc_stderr\": 0.02737770662467071,\n\ \ \"acc_norm\": 0.27169811320754716,\n \"acc_norm_stderr\": 0.02737770662467071\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : 
{\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.17,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n\ \ \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036846,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.21965317919075145,\n\ \ \"acc_stderr\": 0.031568093627031744,\n \"acc_norm\": 0.21965317919075145,\n\ \ \"acc_norm_stderr\": 0.031568093627031744\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364396,\n\ \ \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364396\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.17,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n\ \ \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.3021276595744681,\n \"acc_stderr\": 0.03001755447188055,\n\ \ \"acc_norm\": 0.3021276595744681,\n \"acc_norm_stderr\": 0.03001755447188055\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n\ \ \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n\ \ \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.23448275862068965,\n \"acc_stderr\": 0.035306258743465914,\n\ \ \"acc_norm\": 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.24867724867724866,\n \"acc_stderr\": 0.02226181769240017,\n \"\ acc_norm\": 0.24867724867724866,\n \"acc_norm_stderr\": 0.02226181769240017\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.18253968253968253,\n\ \ \"acc_stderr\": 0.03455071019102148,\n \"acc_norm\": 0.18253968253968253,\n\ \ \"acc_norm_stderr\": 0.03455071019102148\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653694,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653694\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25806451612903225,\n\ \ \"acc_stderr\": 0.024892469172462833,\n \"acc_norm\": 0.25806451612903225,\n\ \ \"acc_norm_stderr\": 0.024892469172462833\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.29064039408866993,\n \"acc_stderr\": 0.0319474007226554,\n\ \ \"acc_norm\": 0.29064039408866993,\n \"acc_norm_stderr\": 0.0319474007226554\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\"\ : 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.03453131801885416,\n\ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.03453131801885416\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.24242424242424243,\n \"acc_stderr\": 0.030532892233932036,\n \"\ acc_norm\": 0.24242424242424243,\n \"acc_norm_stderr\": 0.030532892233932036\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.21761658031088082,\n 
\"acc_stderr\": 0.029778663037752954,\n\ \ \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.029778663037752954\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.22564102564102564,\n \"acc_stderr\": 0.021193632525148533,\n\ \ \"acc_norm\": 0.22564102564102564,\n \"acc_norm_stderr\": 0.021193632525148533\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.23703703703703705,\n \"acc_stderr\": 0.02592887613276612,\n \ \ \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.02592887613276612\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.18907563025210083,\n \"acc_stderr\": 0.02543511943810535,\n\ \ \"acc_norm\": 0.18907563025210083,\n \"acc_norm_stderr\": 0.02543511943810535\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.19205298013245034,\n \"acc_stderr\": 0.032162984205936135,\n \"\ acc_norm\": 0.19205298013245034,\n \"acc_norm_stderr\": 0.032162984205936135\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.24403669724770644,\n \"acc_stderr\": 0.018415286351416416,\n \"\ acc_norm\": 0.24403669724770644,\n \"acc_norm_stderr\": 0.018415286351416416\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.2962962962962963,\n \"acc_stderr\": 0.031141447823536023,\n \"\ acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.031141447823536023\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.2647058823529412,\n \"acc_stderr\": 0.030964517926923403,\n \"\ acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.030964517926923403\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \ \ \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3452914798206278,\n\ \ \"acc_stderr\": 0.031911001928357954,\n \"acc_norm\": 0.3452914798206278,\n\ \ \"acc_norm_stderr\": 0.031911001928357954\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2366412213740458,\n \"acc_stderr\": 0.03727673575596918,\n\ \ \"acc_norm\": 0.2366412213740458,\n \"acc_norm_stderr\": 0.03727673575596918\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2644628099173554,\n \"acc_stderr\": 0.04026187527591206,\n \"\ acc_norm\": 0.2644628099173554,\n \"acc_norm_stderr\": 0.04026187527591206\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n\ \ \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \ \ \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.26993865030674846,\n \"acc_stderr\": 0.03487825168497892,\n\ \ \"acc_norm\": 0.26993865030674846,\n \"acc_norm_stderr\": 0.03487825168497892\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n\ \ \"acc_stderr\": 0.04203277291467764,\n \"acc_norm\": 0.26785714285714285,\n\ \ \"acc_norm_stderr\": 0.04203277291467764\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.3300970873786408,\n \"acc_stderr\": 0.04656147110012352,\n\ \ \"acc_norm\": 0.3300970873786408,\n \"acc_norm_stderr\": 0.04656147110012352\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2264957264957265,\n\ \ \"acc_stderr\": 0.027421007295392916,\n \"acc_norm\": 0.2264957264957265,\n\ \ \"acc_norm_stderr\": 0.027421007295392916\n 
},\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.28735632183908044,\n\ \ \"acc_stderr\": 0.0161824107306827,\n \"acc_norm\": 0.28735632183908044,\n\ \ \"acc_norm_stderr\": 0.0161824107306827\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.022497230190967547,\n\ \ \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.022497230190967547\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.02428861946604612,\n\ \ \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.02428861946604612\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2540192926045016,\n\ \ \"acc_stderr\": 0.02472386150477169,\n \"acc_norm\": 0.2540192926045016,\n\ \ \"acc_norm_stderr\": 0.02472386150477169\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.2808641975308642,\n \"acc_stderr\": 0.025006469755799204,\n\ \ \"acc_norm\": 0.2808641975308642,\n \"acc_norm_stderr\": 0.025006469755799204\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2765957446808511,\n \"acc_stderr\": 0.026684564340461004,\n \ \ \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.026684564340461004\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23859191655801826,\n\ \ \"acc_stderr\": 0.010885929742002205,\n \"acc_norm\": 0.23859191655801826,\n\ \ \"acc_norm_stderr\": 0.010885929742002205\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.024562204314142317,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.024562204314142317\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.26143790849673204,\n \"acc_stderr\": 0.017776947157528037,\n \ \ \"acc_norm\": 0.26143790849673204,\n \"acc_norm_stderr\": 0.017776947157528037\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2545454545454545,\n\ \ \"acc_stderr\": 0.04172343038705383,\n \"acc_norm\": 0.2545454545454545,\n\ \ \"acc_norm_stderr\": 0.04172343038705383\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.17551020408163265,\n \"acc_stderr\": 0.024352800722970015,\n\ \ \"acc_norm\": 0.17551020408163265,\n \"acc_norm_stderr\": 0.024352800722970015\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23383084577114427,\n\ \ \"acc_stderr\": 0.029929415408348387,\n \"acc_norm\": 0.23383084577114427,\n\ \ \"acc_norm_stderr\": 0.029929415408348387\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2710843373493976,\n\ \ \"acc_stderr\": 0.03460579907553026,\n \"acc_norm\": 0.2710843373493976,\n\ \ \"acc_norm_stderr\": 0.03460579907553026\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03188578017686399,\n\ \ \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.03188578017686399\n\ \ },\n 
\"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22643818849449204,\n\ \ \"mc1_stderr\": 0.014651337324602574,\n \"mc2\": 0.40115476804436745,\n\ \ \"mc2_stderr\": 0.014756133562988513\n }\n}\n```" repo_url: https://huggingface.co/hakurei/lotus-12B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|arc:challenge|25_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hellaswag|10_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T13:41:37.836572.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T13:41:37.836572.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T13:41:37.836572.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-18T13:41:37.836572.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-management|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T13:41:37.836572.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T13:41:37.836572.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_18T13_41_37.836572 path: - '**/details_harness|truthfulqa:mc|0_2023-07-18T13:41:37.836572.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-18T13:41:37.836572.parquet' - config_name: results data_files: - split: 2023_07_18T13_41_37.836572 path: - results_2023-07-18T13:41:37.836572.parquet - split: latest path: - results_2023-07-18T13:41:37.836572.parquet --- # Dataset Card for Evaluation run of hakurei/lotus-12B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/hakurei/lotus-12B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [hakurei/lotus-12B](https://huggingface.co/hakurei/lotus-12B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). A short sketch for loading this aggregated configuration is included at the end of this card. To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_hakurei__lotus-12B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-18T13:41:37.836572](https://huggingface.co/datasets/open-llm-leaderboard/details_hakurei__lotus-12B/blob/main/results_2023-07-18T13%3A41%3A37.836572.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2485367177317206, "acc_stderr": 0.03124226591981919, "acc_norm": 0.25130617872419225, "acc_norm_stderr": 0.03125347081115218, "mc1": 0.22643818849449204, "mc1_stderr": 0.014651337324602574, "mc2": 0.40115476804436745, "mc2_stderr": 0.014756133562988513 }, "harness|arc:challenge|25": { "acc": 0.26535836177474403, "acc_stderr": 0.012902554762313962, "acc_norm": 0.30716723549488056, "acc_norm_stderr": 0.013481034054980945 }, "harness|hellaswag|10": { "acc": 0.4054969129655447, "acc_stderr": 0.0048998450871831105, "acc_norm": 0.5270862378012349, "acc_norm_stderr": 0.004982454383162063 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.25925925925925924, "acc_stderr": 0.03785714465066653, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.03785714465066653 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.27169811320754716, "acc_stderr": 0.02737770662467071, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.02737770662467071 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.21965317919075145, "acc_stderr": 0.031568093627031744, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.031568093627031744 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364396, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364396 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3021276595744681, "acc_stderr": 0.03001755447188055, "acc_norm": 0.3021276595744681, "acc_norm_stderr": 0.03001755447188055 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.23448275862068965, "acc_stderr": 0.035306258743465914, "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24867724867724866, "acc_stderr": 0.02226181769240017, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.02226181769240017 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.18253968253968253, "acc_stderr": 
0.03455071019102148, "acc_norm": 0.18253968253968253, "acc_norm_stderr": 0.03455071019102148 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.03861229196653694, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25806451612903225, "acc_stderr": 0.024892469172462833, "acc_norm": 0.25806451612903225, "acc_norm_stderr": 0.024892469172462833 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.29064039408866993, "acc_stderr": 0.0319474007226554, "acc_norm": 0.29064039408866993, "acc_norm_stderr": 0.0319474007226554 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885416, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885416 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.24242424242424243, "acc_stderr": 0.030532892233932036, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.030532892233932036 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.029778663037752954, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.029778663037752954 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.22564102564102564, "acc_stderr": 0.021193632525148533, "acc_norm": 0.22564102564102564, "acc_norm_stderr": 0.021193632525148533 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23703703703703705, "acc_stderr": 0.02592887613276612, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.02592887613276612 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.18907563025210083, "acc_stderr": 0.02543511943810535, "acc_norm": 0.18907563025210083, "acc_norm_stderr": 0.02543511943810535 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.19205298013245034, "acc_stderr": 0.032162984205936135, "acc_norm": 0.19205298013245034, "acc_norm_stderr": 0.032162984205936135 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24403669724770644, "acc_stderr": 0.018415286351416416, "acc_norm": 0.24403669724770644, "acc_norm_stderr": 0.018415286351416416 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.031141447823536023, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.031141447823536023 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2647058823529412, "acc_stderr": 0.030964517926923403, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.030964517926923403 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3452914798206278, "acc_stderr": 0.031911001928357954, "acc_norm": 0.3452914798206278, "acc_norm_stderr": 0.031911001928357954 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.03727673575596918, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.03727673575596918 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2644628099173554, "acc_stderr": 0.04026187527591206, "acc_norm": 0.2644628099173554, "acc_norm_stderr": 0.04026187527591206 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, 
"acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26993865030674846, "acc_stderr": 0.03487825168497892, "acc_norm": 0.26993865030674846, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467764, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467764 }, "harness|hendrycksTest-management|5": { "acc": 0.3300970873786408, "acc_stderr": 0.04656147110012352, "acc_norm": 0.3300970873786408, "acc_norm_stderr": 0.04656147110012352 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2264957264957265, "acc_stderr": 0.027421007295392916, "acc_norm": 0.2264957264957265, "acc_norm_stderr": 0.027421007295392916 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.28735632183908044, "acc_stderr": 0.0161824107306827, "acc_norm": 0.28735632183908044, "acc_norm_stderr": 0.0161824107306827 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2254335260115607, "acc_stderr": 0.022497230190967547, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.022497230190967547 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.23529411764705882, "acc_stderr": 0.02428861946604612, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.02428861946604612 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2540192926045016, "acc_stderr": 0.02472386150477169, "acc_norm": 0.2540192926045016, "acc_norm_stderr": 0.02472386150477169 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2808641975308642, "acc_stderr": 0.025006469755799204, "acc_norm": 0.2808641975308642, "acc_norm_stderr": 0.025006469755799204 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340461004, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340461004 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.23859191655801826, "acc_stderr": 0.010885929742002205, "acc_norm": 0.23859191655801826, "acc_norm_stderr": 0.010885929742002205 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.20588235294117646, "acc_stderr": 0.024562204314142317, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.024562204314142317 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.26143790849673204, "acc_stderr": 0.017776947157528037, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.017776947157528037 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2545454545454545, "acc_stderr": 0.04172343038705383, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.04172343038705383 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.17551020408163265, "acc_stderr": 0.024352800722970015, "acc_norm": 0.17551020408163265, "acc_norm_stderr": 0.024352800722970015 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.029929415408348387, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.029929415408348387 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-virology|5": { "acc": 0.2710843373493976, "acc_stderr": 
0.03460579907553026, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553026 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03188578017686399, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03188578017686399 }, "harness|truthfulqa:mc|0": { "mc1": 0.22643818849449204, "mc1_stderr": 0.014651337324602574, "mc2": 0.40115476804436745, "mc2_stderr": 0.014756133562988513 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
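As referenced in the Dataset Summary above, here is a minimal sketch for pulling the aggregated scores out of the `results` configuration of this dataset. It assumes the `latest` split listed in this card's configs; the exact column layout inside the results parquet file may differ between harness versions, so treat the inspection step as illustrative rather than definitive.

```python
from datasets import load_dataset

# Aggregated metrics of the most recent evaluation run: the "latest" split
# of the "results" configuration always points to the newest timestamped run.
results = load_dataset(
    "open-llm-leaderboard/details_hakurei__lotus-12B",
    "results",
    split="latest",
)

# One row per stored run; print the first row to inspect the recorded metrics.
print(results[0])
```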
[ -0.725322425365448, -0.8539494276046753, 0.25762253999710083, 0.24380367994308472, -0.18151035904884338, -0.05906667560338974, -0.005240367725491524, -0.20246417820453644, 0.5871584415435791, -0.056526269763708115, -0.4933103919029236, -0.717047393321991, -0.4415128529071808, 0.223758727312088, -0.0001323781325481832, 0.7927691340446472, -0.2004765123128891, -0.13904035091400146, 0.10572030395269394, -0.031915437430143356, -0.2727035880088806, -0.3043105900287628, -0.5070528984069824, -0.3547787070274353, 0.17314346134662628, 0.4453694224357605, 0.43426358699798584, 0.854516863822937, 0.7307551503181458, 0.2874133586883545, -0.3138900101184845, -0.003364765550941229, -0.1555144488811493, -0.29761889576911926, 0.4313764274120331, -0.35172364115715027, -0.8654465675354004, 0.3121297061443329, 0.7578693628311157, 0.6305501461029053, -0.09778711944818497, 0.29294806718826294, 0.04579836130142212, 0.5786187648773193, -0.37989771366119385, 0.04112129658460617, -0.25766417384147644, 0.23003309965133667, -0.22703738510608673, -0.27357935905456543, -0.32617998123168945, -0.22575780749320984, -0.18484361469745636, -0.9185330271720886, 0.2402126044034958, 0.34316617250442505, 1.601750135421753, -0.13889804482460022, -0.22860462963581085, 0.09037946909666061, -0.11473482847213745, 1.0220247507095337, -0.8572381734848022, 0.351565420627594, 0.8129616975784302, 0.16662541031837463, -0.16558974981307983, -0.5800436735153198, -0.61368328332901, 0.1066834032535553, -0.38707074522972107, 0.3816065788269043, -0.05088353902101517, -0.20523959398269653, 0.36987197399139404, 0.6832521557807922, -0.6530553698539734, 0.1772746592760086, -0.6448649764060974, -0.1671072095632553, 1.067319393157959, 0.3606790602207184, 0.06192397698760033, -0.41340741515159607, -0.6667972803115845, -0.64227294921875, -0.41991496086120605, 0.26446107029914856, 0.41441693902015686, 0.34869492053985596, -0.4200594425201416, 0.6736355423927307, -0.375749409198761, 0.5710625052452087, 0.4117473065853119, 0.0074321492575109005, 0.8643000721931458, -0.6768696308135986, -0.5540471076965332, -0.06690400093793869, 1.1094335317611694, 0.5440320372581482, 0.048534095287323, 0.2756836712360382, 0.026835773140192032, -0.08515744656324387, 0.050537995994091034, -0.870323121547699, -0.30639758706092834, 0.13812236487865448, -0.4256591498851776, -0.5192018151283264, 0.34548354148864746, -0.8971464037895203, 0.1204998567700386, -0.01865462027490139, 0.389193058013916, -0.5195191502571106, -0.09942527115345001, 0.2270706593990326, -0.38337889313697815, 0.8357416987419128, -0.17225369811058044, -0.7975351214408875, 0.4254186749458313, 0.5190268158912659, 0.7596738934516907, -0.11193589866161346, -0.43018823862075806, -0.13390758633613586, -0.07497137039899826, -0.29881682991981506, 0.5250796675682068, -0.31529927253723145, -0.4631950855255127, -0.29391786456108093, 0.3059220314025879, -0.24701589345932007, -0.3548259437084198, 0.7542925477027893, -0.215006485581398, 0.22798655927181244, -0.458310604095459, -0.6333019733428955, 0.13891179859638214, 0.37205731868743896, -0.4058690071105957, 1.2766350507736206, 0.23401705920696259, -0.8199175596237183, 0.44737735390663147, -0.6119138598442078, -0.2010204792022705, -0.021898547187447548, -0.06906460225582123, -0.8677263855934143, -0.2980043292045593, 0.1913912445306778, 0.4077736735343933, -0.16075631976127625, -0.16363544762134552, -0.4034789800643921, -0.3627222180366516, 0.3190123736858368, -0.15400750935077667, 1.2274657487869263, 0.011179765686392784, -0.7567580938339233, -0.10643061250448227, 
-1.2467544078826904, 0.27692750096321106, 0.21432524919509888, -0.36548393964767456, -0.1932816207408905, -0.4949304461479187, -0.028679626062512398, 0.13349294662475586, 0.277961403131485, -0.8021445274353027, 0.2788998782634735, -0.328049898147583, 0.1423713117837906, 1.2720409631729126, 0.063719242811203, 0.13520263135433197, -0.5547429323196411, 0.5333733558654785, 0.262906014919281, 0.19075420498847961, 0.33675238490104675, -0.6185908317565918, -0.8257501721382141, -0.5328567624092102, -0.056522540748119354, 0.6006290912628174, -0.15953883528709412, 1.1576937437057495, 0.06485715508460999, -0.9081089496612549, -0.49290668964385986, -0.13094037771224976, 0.4952473044395447, 0.7804835438728333, 0.6091241240501404, -0.0451187938451767, -0.6216481328010559, -1.1016632318496704, -0.3009425699710846, -0.1648327112197876, 0.17155979573726654, 0.22864320874214172, 1.0268688201904297, -0.23927362263202667, 0.57078617811203, -1.0651865005493164, -0.23161914944648743, 0.18255361914634705, -0.03887202590703964, 0.7870546579360962, 0.7472346425056458, 0.5554676651954651, -0.6438518166542053, -0.5599862337112427, 0.22069677710533142, -0.900976300239563, -0.07446994632482529, 0.11258279532194138, -0.32640036940574646, 0.17254003882408142, 0.12834379076957703, -0.7217961549758911, 0.5657995939254761, 0.15122325718402863, -1.0630674362182617, 1.0152698755264282, -0.36901992559432983, 0.596114456653595, -1.0489851236343384, 0.17658138275146484, -0.05446995422244072, 0.0692082867026329, -0.4831129014492035, 0.006469015032052994, 0.13475239276885986, 0.434339314699173, -0.46909260749816895, 0.7846928834915161, -0.7185702323913574, -0.08620718121528625, 0.44626519083976746, 0.10335332900285721, -0.1301814764738083, 0.35760971903800964, -0.22741146385669708, 0.7959645390510559, 0.7728435397148132, -0.47969239950180054, 0.5399636626243591, 0.4269994795322418, -0.23040594160556793, 0.7386999130249023, -0.4625357389450073, -0.3236757516860962, 0.3106720745563507, -0.07122399657964706, -0.8169793486595154, -0.49898505210876465, 0.037031982094049454, -0.6062251329421997, -0.08749569207429886, 0.40289199352264404, -0.27708786725997925, -0.8284775018692017, -0.9373851418495178, 0.3194865882396698, 0.758250892162323, -0.41869720816612244, -0.1600092053413391, 0.05479170009493828, 0.12361215054988861, -0.8243706822395325, -0.8424797654151917, -0.49481454491615295, -0.23553375899791718, -0.7128826379776001, 0.3169410526752472, -0.23428097367286682, -0.2847810983657837, -0.07904060184955597, -0.20526108145713806, -0.30969223380088806, 0.012280277907848358, 0.15173695981502533, 0.673619270324707, -0.415988028049469, -0.3286348581314087, -0.2573144733905792, -0.19702330231666565, 0.23769938945770264, -0.08804777264595032, 0.36885520815849304, -0.45897626876831055, -0.4469482898712158, -0.4389229118824005, -0.055661436170339584, 0.7018700838088989, -0.11234083771705627, 0.7296032905578613, 0.4486130475997925, -0.34843626618385315, 0.0055551654659211636, -0.26876530051231384, -0.2733231484889984, -0.581649661064148, 0.2756127119064331, -0.526212751865387, -1.008514165878296, 0.8127930164337158, 0.5519005656242371, 0.05263820290565491, 1.12617027759552, 0.621559739112854, -0.27936679124832153, 0.9980137944221497, 0.028286732733249664, 0.322381854057312, 0.35110387206077576, -0.6637018322944641, 0.15104521811008453, -0.9043717384338379, -0.299524188041687, -0.5813953280448914, -0.4689668118953705, -0.7445390820503235, -0.04020516946911812, 0.2896793782711029, 0.11634672433137894, -0.6941773891448975, 0.5876452922821045, 
-0.8298223614692688, 0.5810109972953796, 0.5669465661048889, 0.25651851296424866, 0.16513384878635406, -0.1757657825946808, -0.39392492175102234, -0.13140586018562317, -0.4520048499107361, -0.23191554844379425, 1.210429072380066, 0.2750950753688812, 0.7540789246559143, 0.09479882568120956, 0.8888779878616333, 0.08922197669744492, -0.048657290637493134, -0.5458598136901855, 0.6536238789558411, 0.11125822365283966, -0.791569709777832, -0.4272388815879822, -0.5002631545066833, -1.1111215353012085, 0.3753867745399475, -0.125322625041008, -0.8112165927886963, 0.10106973350048065, 0.00460729980841279, -0.22503922879695892, 0.4973176419734955, -0.5430325865745544, 0.8632401823997498, -0.10572263598442078, -0.5130107402801514, 0.07371368259191513, -0.8192795515060425, 0.4712686836719513, 0.17021633684635162, 0.26843079924583435, 0.06153160333633423, 0.25182634592056274, 1.1813745498657227, -0.8139826059341431, 0.41285425424575806, 0.07144756615161896, 0.03873402997851372, 0.31914910674095154, -0.16510967910289764, 0.4879198968410492, 0.07919298857450485, -0.0003890308435074985, -0.16173392534255981, 0.2761281430721283, -0.8909085392951965, -0.06165353208780289, 0.9380902647972107, -0.9871359467506409, -0.623541533946991, -0.8925459980964661, -0.5142819881439209, 0.11035598069429398, 0.5993188619613647, 0.3759308159351349, 0.5189176797866821, 0.014455566182732582, 0.44691118597984314, 0.8283087611198425, -0.152299165725708, 0.6096485257148743, 0.24246689677238464, 0.08759155124425888, -0.6829984188079834, 0.852695882320404, 0.07626461237668991, 0.3406015634536743, 0.2683667838573456, 0.37799131870269775, -0.5096313953399658, -0.17545956373214722, -0.22442114353179932, 0.5354798436164856, -0.6508322358131409, -0.2536277770996094, -0.3327483534812927, -0.39594125747680664, -0.783258318901062, -0.6538141965866089, -0.32185840606689453, -0.48684605956077576, -0.49163275957107544, -0.4781954288482666, 0.5653077960014343, 0.49688786268234253, -0.36926189064979553, 0.04864978417754173, -0.4892610013484955, 0.23412173986434937, 0.30825361609458923, 0.5418240427970886, -0.3946773111820221, -0.5790627002716064, 0.012221035547554493, -0.15861192345619202, -0.5952274799346924, -0.9606667757034302, 0.35619693994522095, -0.07505378127098083, 0.5022339224815369, 0.6059410572052002, 0.07798650860786438, 0.861381471157074, -0.20243099331855774, 1.0492442846298218, 0.3097921311855316, -0.7743518948554993, 0.7412534952163696, -0.3506084084510803, 0.196003720164299, 0.6328756213188171, 0.1899736076593399, -0.19130530953407288, -0.6760753989219666, -1.3100321292877197, -0.7558773159980774, 0.6617671847343445, 0.4197324514389038, -0.2970349192619324, 0.058849770575761795, 0.14711494743824005, -0.3102506697177887, -0.19283278286457062, -0.6686835289001465, -0.8946682214736938, -0.1647985279560089, -0.519184410572052, 0.1147105023264885, 0.04578230530023575, -0.3823394477367401, -0.8039409518241882, 0.9735479950904846, 0.044924620538949966, 0.6242490410804749, 0.4397316575050354, 0.07648547738790512, 0.025540439411997795, 0.46228161454200745, 0.9281308650970459, 0.7454826235771179, -0.46736040711402893, 0.4472484588623047, 0.41269463300704956, -1.05824613571167, 0.4791359305381775, 0.3389190137386322, -0.07340282946825027, -0.05418600142002106, 0.4781225025653839, 0.4320049285888672, 0.006864645052701235, -0.1503332406282425, 0.6249361634254456, -0.01671718992292881, -0.5502849817276001, -0.42137080430984497, 0.08836500346660614, -0.11146321147680283, -0.06557537615299225, 0.36350706219673157, -0.1605185717344284, 
-0.06974908709526062, -0.4663327634334564, 0.48451051115989685, 0.3766486644744873, -0.4694671332836151, -0.1463056355714798, 0.7400449514389038, -0.18262194097042084, -0.17453598976135254, 0.34568434953689575, -0.20470523834228516, -0.6070127487182617, 1.1173784732818604, 0.5846461653709412, 0.6840543746948242, -0.28039535880088806, -0.1000179573893547, 0.9244643449783325, 0.40988004207611084, 0.0005382348317652941, 0.49536004662513733, 0.320866197347641, -0.2540304958820343, 0.174240380525589, -0.8507514595985413, -0.04329229146242142, 0.15987901389598846, -0.8362271785736084, 0.3949644863605499, -0.529223620891571, -0.1542348563671112, 0.05433603376150131, 0.4330988824367523, -0.4127807915210724, 0.5478531122207642, -0.42197245359420776, 1.2242615222930908, -0.9810436964035034, 0.7209839820861816, 0.7788475155830383, -0.5733868479728699, -1.0012315511703491, -0.5313403606414795, 0.01040196605026722, -0.8342204689979553, 0.583206832408905, -0.024599574506282806, 0.19869744777679443, -0.08904906362295151, -0.7061235308647156, -0.8978614211082458, 1.4222629070281982, -0.09909161925315857, -0.3869169354438782, 0.24762926995754242, -0.02623041905462742, 0.43125104904174805, 0.13729706406593323, 0.5930345058441162, 0.7603487372398376, 0.8493558168411255, -0.09480833262205124, -0.7236813306808472, 0.34805119037628174, -0.5310265421867371, -0.32876941561698914, 0.49383682012557983, -0.9486623406410217, 1.176716685295105, 0.03329608961939812, 0.22733379900455475, -0.19192931056022644, 0.6940957307815552, 0.8037083745002747, 0.29054662585258484, 0.3699191212654114, 0.9328621625900269, 0.8632668852806091, -0.512765645980835, 1.0159913301467896, -0.2473706752061844, 0.8480271697044373, 0.658071756362915, 0.20603302121162415, 0.7498399019241333, 0.6567836403846741, -0.5553884506225586, 0.5451080799102783, 0.8488314747810364, -0.3213486075401306, 0.382840633392334, 0.258343368768692, -0.13268926739692688, -0.160949245095253, 0.44011902809143066, -0.8567325472831726, 0.10250589996576309, 0.06583336740732193, -0.3199584484100342, 0.0987168699502945, -0.44316890835762024, 0.34969785809516907, -0.06377584487199783, -0.020674554631114006, 0.36649805307388306, 0.05968605354428291, -0.4643179178237915, 0.9446496367454529, -0.12128092348575592, 0.7383084893226624, -0.5181595683097839, -0.07018980383872986, -0.38709598779678345, 0.6429910063743591, -0.4426993429660797, -1.038627028465271, 0.19020934402942657, 0.06571635603904724, -0.10032319277524948, -0.16217109560966492, 0.7191272377967834, -0.18599358201026917, -0.779607892036438, 0.1036229282617569, 0.05137292295694351, 0.09052503108978271, 0.5609170794487, -0.6510714292526245, -0.3343454599380493, -0.03487995266914368, -0.5641161203384399, 0.10526151210069656, 0.3260266184806824, 0.28254860639572144, 0.5430351495742798, 0.6149157881736755, 0.15249371528625488, 0.38361111283302307, -0.5862008929252625, 0.7882180213928223, -1.0852832794189453, -0.7608247995376587, -0.9503204226493835, 0.4409802258014679, -0.3385995626449585, -0.8634013533592224, 1.0200731754302979, 1.0796860456466675, 0.8529525399208069, 0.0028903724160045385, 0.6363019347190857, -0.35562121868133545, 0.2755424380302429, -0.3853427469730377, 0.9487350583076477, -0.8480987548828125, -0.20190367102622986, -0.2669401168823242, -0.6996376514434814, -0.4124917685985565, 0.8597809076309204, -0.18346504867076874, 0.0662241131067276, 1.0900651216506958, 0.6835297346115112, -0.13382752239704132, 0.09310255944728851, -0.06209361180663109, 0.588968813419342, 0.4116322696208954, 0.9759065508842468, 
0.6233294606208801, -0.7629470825195312, 0.3083454668521881, -0.504524290561676, -0.42610952258110046, -0.38853293657302856, -0.46678173542022705, -0.8782994747161865, -0.4872414767742157, -0.21547548472881317, -0.6279187202453613, -0.1251763254404068, 1.0099323987960815, 0.4642447233200073, -0.9143890142440796, -0.4553591012954712, -0.13591526448726654, 0.14003433287143707, -0.5682881474494934, -0.4138025939464569, 0.7509925365447998, -0.14025405049324036, -0.5272644758224487, 0.15949101746082306, -0.1296079158782959, 0.23062975704669952, 0.09756651520729065, -0.4422420561313629, -0.7185127139091492, 0.01617463119328022, 0.40474286675453186, 0.2965753972530365, -0.6700608134269714, -0.7022815942764282, 0.34010377526283264, -0.5232990980148315, 0.43901151418685913, -0.034538574516773224, -0.47730517387390137, 0.027723586186766624, 0.6761133074760437, 0.5263817310333252, 0.6971035003662109, -0.048358723521232605, 0.0755760446190834, -0.6434005498886108, 0.1548931896686554, -0.053080566227436066, 0.2946220338344574, -0.014161006547510624, -0.3205721974372864, 0.7809133529663086, 0.7030453681945801, -0.5317429900169373, -1.0837494134902954, -0.4251428246498108, -1.4376325607299805, -0.021001657471060753, 1.122426986694336, 0.00306700705550611, -0.5338875651359558, 0.2730571925640106, -0.11661118268966675, 0.2099737823009491, -0.3075074255466461, 0.7496815919876099, 0.8339579701423645, -0.365000456571579, 0.11203447729349136, -0.6137508153915405, 0.38538840413093567, 0.5586846470832825, -1.221347689628601, -0.0692969337105751, 0.24382956326007843, 0.27315080165863037, 0.3721471428871155, 0.6331952214241028, -0.10797438025474548, 0.28432679176330566, 0.21741144359111786, 0.032001107931137085, -0.0031008939258754253, 0.07096107304096222, -0.23748819530010223, 0.06757458299398422, -0.2321932166814804, -0.4695494472980499 ]
open-llm-leaderboard/details_Kiddyz__testlm-1
open-llm-leaderboard
2023-08-27T12:38:55Z
201
0
[ "region:us" ]
null
2023-08-18T11:59:10Z
--- pretty_name: Evaluation run of Kiddyz/testlm-1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Kiddyz/testlm-1](https://huggingface.co/Kiddyz/testlm-1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kiddyz__testlm-1\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-16T12:53:22.897812](https://huggingface.co/datasets/open-llm-leaderboard/details_Kiddyz__testlm-1/blob/main/results_2023-08-16T12%3A53%3A22.897812.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5128834307003443,\n\ \ \"acc_stderr\": 0.03501260490290392,\n \"acc_norm\": 0.5166256154161327,\n\ \ \"acc_norm_stderr\": 0.03500071412093006,\n \"mc1\": 0.32802937576499386,\n\ \ \"mc1_stderr\": 0.01643563293281503,\n \"mc2\": 0.48413168566081527,\n\ \ \"mc2_stderr\": 0.015167638286466481\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5017064846416383,\n \"acc_stderr\": 0.014611305705056992,\n\ \ \"acc_norm\": 0.5349829351535836,\n \"acc_norm_stderr\": 0.014575583922019669\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5705038836885082,\n\ \ \"acc_stderr\": 0.004939925958728884,\n \"acc_norm\": 0.758016331408086,\n\ \ \"acc_norm_stderr\": 0.004274091605308121\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n\ \ \"acc_stderr\": 0.04313531696750573,\n \"acc_norm\": 0.4740740740740741,\n\ \ \"acc_norm_stderr\": 0.04313531696750573\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5131578947368421,\n \"acc_stderr\": 0.04067533136309174,\n\ \ \"acc_norm\": 0.5131578947368421,\n \"acc_norm_stderr\": 0.04067533136309174\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5433962264150943,\n \"acc_stderr\": 0.03065674869673943,\n\ \ \"acc_norm\": 0.5433962264150943,\n \"acc_norm_stderr\": 0.03065674869673943\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5555555555555556,\n\ \ \"acc_stderr\": 0.041553199555931467,\n \"acc_norm\": 0.5555555555555556,\n\ \ \"acc_norm_stderr\": 0.041553199555931467\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.39,\n 
\"acc_stderr\": 0.04902071300001975,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"\ acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4624277456647399,\n\ \ \"acc_stderr\": 0.0380168510452446,\n \"acc_norm\": 0.4624277456647399,\n\ \ \"acc_norm_stderr\": 0.0380168510452446\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617747,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617747\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n\ \ \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.03227834510146267,\n\ \ \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.03227834510146267\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n\ \ \"acc_stderr\": 0.04303684033537314,\n \"acc_norm\": 0.2982456140350877,\n\ \ \"acc_norm_stderr\": 0.04303684033537314\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.496551724137931,\n \"acc_stderr\": 0.041665675771015785,\n\ \ \"acc_norm\": 0.496551724137931,\n \"acc_norm_stderr\": 0.041665675771015785\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3333333333333333,\n \"acc_stderr\": 0.0242785680243077,\n \"acc_norm\"\ : 0.3333333333333333,\n \"acc_norm_stderr\": 0.0242785680243077\n },\n\ \ \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\ \ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\ \ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.5903225806451613,\n \"acc_stderr\": 0.027976054915347368,\n \"\ acc_norm\": 0.5903225806451613,\n \"acc_norm_stderr\": 0.027976054915347368\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.35960591133004927,\n \"acc_stderr\": 0.033764582465095665,\n \"\ acc_norm\": 0.35960591133004927,\n \"acc_norm_stderr\": 0.033764582465095665\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\"\ : 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n\ \ \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6262626262626263,\n \"acc_stderr\": 0.03446897738659333,\n \"\ acc_norm\": 0.6262626262626263,\n \"acc_norm_stderr\": 0.03446897738659333\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7202072538860104,\n \"acc_stderr\": 0.03239637046735704,\n\ \ \"acc_norm\": 
0.7202072538860104,\n \"acc_norm_stderr\": 0.03239637046735704\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.49743589743589745,\n \"acc_stderr\": 0.025350672979412202,\n\ \ \"acc_norm\": 0.49743589743589745,\n \"acc_norm_stderr\": 0.025350672979412202\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073838,\n \ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073838\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5210084033613446,\n \"acc_stderr\": 0.03244980849990029,\n \ \ \"acc_norm\": 0.5210084033613446,\n \"acc_norm_stderr\": 0.03244980849990029\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"\ acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7119266055045872,\n \"acc_stderr\": 0.01941644589263603,\n \"\ acc_norm\": 0.7119266055045872,\n \"acc_norm_stderr\": 0.01941644589263603\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.44907407407407407,\n \"acc_stderr\": 0.03392238405321616,\n \"\ acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.03392238405321616\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7156862745098039,\n \"acc_stderr\": 0.03166009679399813,\n \"\ acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.03166009679399813\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7088607594936709,\n \"acc_stderr\": 0.02957160106575337,\n \ \ \"acc_norm\": 0.7088607594936709,\n \"acc_norm_stderr\": 0.02957160106575337\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5919282511210763,\n\ \ \"acc_stderr\": 0.03298574607842822,\n \"acc_norm\": 0.5919282511210763,\n\ \ \"acc_norm_stderr\": 0.03298574607842822\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262972,\n\ \ \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262972\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6528925619834711,\n \"acc_stderr\": 0.043457245702925335,\n \"\ acc_norm\": 0.6528925619834711,\n \"acc_norm_stderr\": 0.043457245702925335\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5833333333333334,\n\ \ \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.5833333333333334,\n\ \ \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5705521472392638,\n \"acc_stderr\": 0.03889066619112722,\n\ \ \"acc_norm\": 0.5705521472392638,\n \"acc_norm_stderr\": 0.03889066619112722\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n\ \ \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n\ \ \"acc_stderr\": 0.027778835904935434,\n \"acc_norm\": 0.7649572649572649,\n\ \ \"acc_norm_stderr\": 0.027778835904935434\n },\n 
\"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7088122605363985,\n\ \ \"acc_stderr\": 0.0162460870697014,\n \"acc_norm\": 0.7088122605363985,\n\ \ \"acc_norm_stderr\": 0.0162460870697014\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5173410404624278,\n \"acc_stderr\": 0.026902900458666647,\n\ \ \"acc_norm\": 0.5173410404624278,\n \"acc_norm_stderr\": 0.026902900458666647\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.29720670391061454,\n\ \ \"acc_stderr\": 0.015285313353641602,\n \"acc_norm\": 0.29720670391061454,\n\ \ \"acc_norm_stderr\": 0.015285313353641602\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.028452639985088006,\n\ \ \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.028452639985088006\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6045016077170418,\n\ \ \"acc_stderr\": 0.027770918531427838,\n \"acc_norm\": 0.6045016077170418,\n\ \ \"acc_norm_stderr\": 0.027770918531427838\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5709876543209876,\n \"acc_stderr\": 0.027538925613470863,\n\ \ \"acc_norm\": 0.5709876543209876,\n \"acc_norm_stderr\": 0.027538925613470863\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3971631205673759,\n \"acc_stderr\": 0.0291898056735871,\n \ \ \"acc_norm\": 0.3971631205673759,\n \"acc_norm_stderr\": 0.0291898056735871\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3754889178617992,\n\ \ \"acc_stderr\": 0.012367945396728208,\n \"acc_norm\": 0.3754889178617992,\n\ \ \"acc_norm_stderr\": 0.012367945396728208\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4852941176470588,\n \"acc_stderr\": 0.03035969707904611,\n\ \ \"acc_norm\": 0.4852941176470588,\n \"acc_norm_stderr\": 0.03035969707904611\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.49836601307189543,\n \"acc_stderr\": 0.020227726838150124,\n \ \ \"acc_norm\": 0.49836601307189543,\n \"acc_norm_stderr\": 0.020227726838150124\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n\ \ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \ \ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6081632653061224,\n \"acc_stderr\": 0.031251275910891656,\n\ \ \"acc_norm\": 0.6081632653061224,\n \"acc_norm_stderr\": 0.031251275910891656\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6716417910447762,\n\ \ \"acc_stderr\": 0.033206858897443244,\n \"acc_norm\": 0.6716417910447762,\n\ \ \"acc_norm_stderr\": 0.033206858897443244\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3795180722891566,\n\ \ \"acc_stderr\": 0.03777798822748018,\n \"acc_norm\": 0.3795180722891566,\n\ \ \"acc_norm_stderr\": 0.03777798822748018\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.03565079670708311,\n\ \ \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.03565079670708311\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 
0.32802937576499386,\n\ \ \"mc1_stderr\": 0.01643563293281503,\n \"mc2\": 0.48413168566081527,\n\ \ \"mc2_stderr\": 0.015167638286466481\n }\n}\n```" repo_url: https://huggingface.co/Kiddyz/testlm-1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|arc:challenge|25_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hellaswag|10_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T12:53:22.897812.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T12:53:22.897812.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T12:53:22.897812.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-16T12:53:22.897812.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-management|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T12:53:22.897812.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T12:53:22.897812.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_16T12_53_22.897812 path: - '**/details_harness|truthfulqa:mc|0_2023-08-16T12:53:22.897812.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-16T12:53:22.897812.parquet' - config_name: results data_files: - split: 2023_08_16T12_53_22.897812 path: - results_2023-08-16T12:53:22.897812.parquet - split: latest path: - results_2023-08-16T12:53:22.897812.parquet --- # Dataset Card for Evaluation run of Kiddyz/testlm-1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Kiddyz/testlm-1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Kiddyz/testlm-1](https://huggingface.co/Kiddyz/testlm-1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kiddyz__testlm-1", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-16T12:53:22.897812](https://huggingface.co/datasets/open-llm-leaderboard/details_Kiddyz__testlm-1/blob/main/results_2023-08-16T12%3A53%3A22.897812.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5128834307003443, "acc_stderr": 0.03501260490290392, "acc_norm": 0.5166256154161327, "acc_norm_stderr": 0.03500071412093006, "mc1": 0.32802937576499386, "mc1_stderr": 0.01643563293281503, "mc2": 0.48413168566081527, "mc2_stderr": 0.015167638286466481 }, "harness|arc:challenge|25": { "acc": 0.5017064846416383, "acc_stderr": 0.014611305705056992, "acc_norm": 0.5349829351535836, "acc_norm_stderr": 0.014575583922019669 }, "harness|hellaswag|10": { "acc": 0.5705038836885082, "acc_stderr": 0.004939925958728884, "acc_norm": 0.758016331408086, "acc_norm_stderr": 0.004274091605308121 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750573, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750573 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5131578947368421, "acc_stderr": 0.04067533136309174, "acc_norm": 0.5131578947368421, "acc_norm_stderr": 0.04067533136309174 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5433962264150943, "acc_stderr": 0.03065674869673943, "acc_norm": 0.5433962264150943, "acc_norm_stderr": 0.03065674869673943 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5555555555555556, "acc_stderr": 0.041553199555931467, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.041553199555931467 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4624277456647399, "acc_stderr": 0.0380168510452446, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.0380168510452446 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146267, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537314, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537314 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.041665675771015785, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.041665675771015785 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, 
"acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5903225806451613, "acc_stderr": 0.027976054915347368, "acc_norm": 0.5903225806451613, "acc_norm_stderr": 0.027976054915347368 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.033764582465095665, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.033764582465095665 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6262626262626263, "acc_stderr": 0.03446897738659333, "acc_norm": 0.6262626262626263, "acc_norm_stderr": 0.03446897738659333 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7202072538860104, "acc_stderr": 0.03239637046735704, "acc_norm": 0.7202072538860104, "acc_norm_stderr": 0.03239637046735704 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.49743589743589745, "acc_stderr": 0.025350672979412202, "acc_norm": 0.49743589743589745, "acc_norm_stderr": 0.025350672979412202 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073838, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073838 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5210084033613446, "acc_stderr": 0.03244980849990029, "acc_norm": 0.5210084033613446, "acc_norm_stderr": 0.03244980849990029 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7119266055045872, "acc_stderr": 0.01941644589263603, "acc_norm": 0.7119266055045872, "acc_norm_stderr": 0.01941644589263603 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.03392238405321616, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.03392238405321616 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7156862745098039, "acc_stderr": 0.03166009679399813, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.03166009679399813 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7088607594936709, "acc_stderr": 0.02957160106575337, "acc_norm": 0.7088607594936709, "acc_norm_stderr": 0.02957160106575337 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5919282511210763, "acc_stderr": 0.03298574607842822, "acc_norm": 0.5919282511210763, "acc_norm_stderr": 0.03298574607842822 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262972, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262972 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.043457245702925335, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.043457245702925335 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 
0.04766075165356461 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5705521472392638, "acc_stderr": 0.03889066619112722, "acc_norm": 0.5705521472392638, "acc_norm_stderr": 0.03889066619112722 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935434, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935434 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7088122605363985, "acc_stderr": 0.0162460870697014, "acc_norm": 0.7088122605363985, "acc_norm_stderr": 0.0162460870697014 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5173410404624278, "acc_stderr": 0.026902900458666647, "acc_norm": 0.5173410404624278, "acc_norm_stderr": 0.026902900458666647 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.29720670391061454, "acc_stderr": 0.015285313353641602, "acc_norm": 0.29720670391061454, "acc_norm_stderr": 0.015285313353641602 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5555555555555556, "acc_stderr": 0.028452639985088006, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.028452639985088006 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6045016077170418, "acc_stderr": 0.027770918531427838, "acc_norm": 0.6045016077170418, "acc_norm_stderr": 0.027770918531427838 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5709876543209876, "acc_stderr": 0.027538925613470863, "acc_norm": 0.5709876543209876, "acc_norm_stderr": 0.027538925613470863 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3971631205673759, "acc_stderr": 0.0291898056735871, "acc_norm": 0.3971631205673759, "acc_norm_stderr": 0.0291898056735871 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3754889178617992, "acc_stderr": 0.012367945396728208, "acc_norm": 0.3754889178617992, "acc_norm_stderr": 0.012367945396728208 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4852941176470588, "acc_stderr": 0.03035969707904611, "acc_norm": 0.4852941176470588, "acc_norm_stderr": 0.03035969707904611 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.49836601307189543, "acc_stderr": 0.020227726838150124, "acc_norm": 0.49836601307189543, "acc_norm_stderr": 0.020227726838150124 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6081632653061224, "acc_stderr": 0.031251275910891656, "acc_norm": 0.6081632653061224, "acc_norm_stderr": 0.031251275910891656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6716417910447762, "acc_stderr": 0.033206858897443244, "acc_norm": 0.6716417910447762, "acc_norm_stderr": 0.033206858897443244 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-virology|5": { "acc": 0.3795180722891566, "acc_stderr": 0.03777798822748018, "acc_norm": 0.3795180722891566, "acc_norm_stderr": 
0.03777798822748018 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6842105263157895, "acc_stderr": 0.03565079670708311, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.03565079670708311 }, "harness|truthfulqa:mc|0": { "mc1": 0.32802937576499386, "mc1_stderr": 0.01643563293281503, "mc2": 0.48413168566081527, "mc2_stderr": 0.015167638286466481 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
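Beyond the per-task example shown in the card above, the configs section also declares an aggregated `results` configuration with a `latest` split. The sketch below shows how one might read those aggregated metrics with the `datasets` library, using the repository id `open-llm-leaderboard/details_Kiddyz__testlm-1` taken from the card; the exact column layout of the returned records is an assumption and may vary between runs.

```python
from datasets import load_dataset

# "results" and the "latest" split come from the configs declared above;
# the record layout (one row of aggregated metrics per run) is an assumption.
results = load_dataset(
    "open-llm-leaderboard/details_Kiddyz__testlm-1",
    "results",
    split="latest",
)

print(results.column_names)  # inspect which metric columns are present
print(results[0])            # aggregated metrics for the latest run
```

The same pattern applies to any of the per-task configurations listed above (for example `harness_hendrycksTest_world_religions_5`) by swapping in the corresponding config name.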
[ -0.716663122177124, -0.8610879182815552, 0.231853187084198, 0.1909383237361908, -0.1808049976825714, -0.06206648424267769, 0.05510180443525314, -0.20601660013198853, 0.5284912586212158, -0.10129334777593613, -0.5439669489860535, -0.7103095054626465, -0.4486943483352661, 0.21380893886089325, -0.06087213009595871, 0.8376850485801697, -0.16190341114997864, -0.13015194237232208, 0.06303569674491882, 0.0038035314064472914, -0.2121279537677765, -0.3578987717628479, -0.49946895241737366, -0.33976826071739197, 0.19551996886730194, 0.4461714029312134, 0.44991815090179443, 0.7895122170448303, 0.6408074498176575, 0.27635058760643005, -0.329990029335022, -0.03766770288348198, -0.17140136659145355, -0.2757928669452667, 0.39890056848526, -0.3283429443836212, -0.840434730052948, 0.32393941283226013, 0.7545126676559448, 0.6335178017616272, -0.09480968117713928, 0.32501667737960815, 0.01577414572238922, 0.5758612155914307, -0.35621577501296997, 0.033174220472574234, -0.2758811116218567, 0.2560776472091675, -0.1755763739347458, -0.24097086489200592, -0.30937695503234863, -0.25111642479896545, -0.13241349160671234, -0.8666507005691528, 0.27889350056648254, 0.3392921984195709, 1.5648200511932373, -0.14521750807762146, -0.24129155278205872, 0.0905250608921051, -0.10933483392000198, 1.0252572298049927, -0.8989266753196716, 0.3584394156932831, 0.7969552278518677, 0.11692935973405838, -0.1678592562675476, -0.5796182155609131, -0.6570853590965271, 0.0824858620762825, -0.3869397044181824, 0.3521048128604889, -0.051926590502262115, -0.19915488362312317, 0.39289459586143494, 0.6340906620025635, -0.7016035914421082, 0.13936081528663635, -0.6366118788719177, -0.15494920313358307, 1.089539885520935, 0.36163604259490967, 0.1001843512058258, -0.3530837893486023, -0.701482892036438, -0.659900426864624, -0.4094039499759674, 0.28500816226005554, 0.44383418560028076, 0.4011751711368561, -0.3836614191532135, 0.6773507595062256, -0.4081844389438629, 0.5612973570823669, 0.3813607692718506, 0.02298264019191265, 0.8935168981552124, -0.6996553540229797, -0.5616427063941956, -0.039591215550899506, 1.082281470298767, 0.5230374932289124, 0.08069862425327301, 0.23855164647102356, 0.01964007131755352, -0.092765673995018, 0.0482022687792778, -0.8414215445518494, -0.2976475656032562, 0.1465092897415161, -0.3805738091468811, -0.4901621639728546, 0.3576928377151489, -0.8791226148605347, 0.17020754516124725, -0.03007877804338932, 0.43629276752471924, -0.49822700023651123, -0.11216774582862854, 0.25553300976753235, -0.3982788920402527, 0.8366063833236694, -0.18116815388202667, -0.7907379865646362, 0.4328662157058716, 0.5048858523368835, 0.7234557867050171, -0.08423805236816406, -0.44880592823028564, -0.09273695200681686, -0.14036990702152252, -0.31178197264671326, 0.5160642862319946, -0.24907439947128296, -0.42520004510879517, -0.2785630524158478, 0.27910491824150085, -0.3082626461982727, -0.33427783846855164, 0.7433543801307678, -0.22157800197601318, 0.25446072220802307, -0.4152649939060211, -0.6580813527107239, 0.1435372233390808, 0.4140150547027588, -0.4004255533218384, 1.3338006734848022, 0.3024072051048279, -0.8145695328712463, 0.4487636685371399, -0.5739116668701172, -0.20381151139736176, -0.023777224123477936, -0.01436297781765461, -0.8106251955032349, -0.28638678789138794, 0.14855526387691498, 0.3633632957935333, -0.11004126816987991, -0.11463092267513275, -0.37241989374160767, -0.366264671087265, 0.3412298262119293, -0.16336418688297272, 1.251304030418396, -0.035381730645895004, -0.7437276244163513, -0.08707267791032791, 
-1.230180025100708, 0.3587365448474884, 0.22029899060726166, -0.3420814275741577, -0.19281712174415588, -0.4639909565448761, -0.0059624165296554565, 0.2336941957473755, 0.2462351769208908, -0.8049637079238892, 0.2830750346183777, -0.36835983395576477, 0.1240655779838562, 1.253763198852539, 0.006130571942776442, 0.11314142495393753, -0.559861421585083, 0.5481042265892029, 0.1985870748758316, 0.2541294991970062, 0.3894076943397522, -0.6022728681564331, -0.7973347902297974, -0.47520169615745544, -0.039777807891368866, 0.5877289175987244, -0.16875618696212769, 1.1427068710327148, 0.0632779449224472, -0.889705240726471, -0.4242522418498993, -0.12953487038612366, 0.5246710181236267, 0.8332856297492981, 0.5966053009033203, -0.007348260842263699, -0.6448197364807129, -1.115138292312622, -0.2774631083011627, -0.20345185697078705, 0.14228221774101257, 0.20754669606685638, 1.040298581123352, -0.24036386609077454, 0.6112220287322998, -1.016864538192749, -0.19084830582141876, 0.16784662008285522, -0.10483402013778687, 0.7830485701560974, 0.7609809041023254, 0.574874758720398, -0.7018795013427734, -0.5577712059020996, 0.14090187847614288, -0.8292631506919861, -0.07492291182279587, 0.17435134947299957, -0.31830546259880066, 0.12821780145168304, 0.09448275715112686, -0.7026134729385376, 0.5407531261444092, 0.21457533538341522, -1.144551396369934, 1.0838265419006348, -0.31255874037742615, 0.5530030727386475, -0.969175398349762, 0.1699564903974533, -0.09661772847175598, 0.03414120525121689, -0.5276951193809509, 0.015995630994439125, 0.10711580514907837, 0.4697067439556122, -0.5354654788970947, 0.8380615711212158, -0.6352905631065369, -0.050397150218486786, 0.5039726495742798, 0.13584290444850922, -0.10317763686180115, 0.34100142121315, -0.29773685336112976, 0.8242735862731934, 0.7706846594810486, -0.4715794622898102, 0.5083402991294861, 0.4440392851829529, -0.18566516041755676, 0.6684231162071228, -0.47054022550582886, -0.3126809298992157, 0.33073967695236206, -0.07050169259309769, -0.8616902828216553, -0.4804389178752899, 0.07646393030881882, -0.6014143824577332, -0.09959139674901962, 0.39350542426109314, -0.3132922947406769, -0.8410251140594482, -0.9501859545707703, 0.31415316462516785, 0.7700582146644592, -0.41921156644821167, -0.2029227912425995, 0.0613243542611599, 0.08135157823562622, -0.8065308928489685, -0.8313307762145996, -0.5034811496734619, -0.20273686945438385, -0.6971121430397034, 0.2999963164329529, -0.2741333842277527, -0.28863587975502014, -0.07232647389173508, -0.27855411171913147, -0.35267847776412964, 0.0041846539825201035, 0.12913590669631958, 0.7029902935028076, -0.4197508990764618, -0.24620890617370605, -0.2332748919725418, -0.15496157109737396, 0.26686012744903564, -0.08914094418287277, 0.360056072473526, -0.4686967730522156, -0.4089878797531128, -0.366684228181839, -0.02704569697380066, 0.7088258862495422, -0.04458130523562431, 0.7084895968437195, 0.3962137997150421, -0.3315224051475525, -0.052688922733068466, -0.2514308989048004, -0.2750304639339447, -0.5795634984970093, 0.24991561472415924, -0.48227059841156006, -1.028694748878479, 0.7723246216773987, 0.5640350580215454, 0.014180717058479786, 1.1373543739318848, 0.5860361456871033, -0.3052386939525604, 1.0412464141845703, 0.03420485928654671, 0.36320534348487854, 0.38294482231140137, -0.6579803824424744, 0.12124178558588028, -0.9152416586875916, -0.3435218930244446, -0.5961845517158508, -0.47260618209838867, -0.6750780344009399, -0.03412654623389244, 0.24086950719356537, 0.15846100449562073, -0.7003283500671387, 0.566864550113678, 
-0.8295778036117554, 0.583055853843689, 0.5766815543174744, 0.2650095224380493, 0.1633252650499344, -0.18503780663013458, -0.42078983783721924, -0.12122252583503723, -0.5145816206932068, -0.27493733167648315, 1.2175469398498535, 0.2501180171966553, 0.720654308795929, 0.06702635437250137, 0.8849365711212158, 0.10222887992858887, -0.03630968928337097, -0.5742447972297668, 0.651970386505127, 0.09393452107906342, -0.8310020565986633, -0.446333110332489, -0.4720154404640198, -1.0716620683670044, 0.40467265248298645, -0.13696303963661194, -0.8353606462478638, 0.08889773488044739, 0.008233724161982536, -0.20876158773899078, 0.4672822654247284, -0.5623124241828918, 0.8600994348526001, -0.12273930013179779, -0.4887097477912903, 0.13402068614959717, -0.8449984192848206, 0.4625222384929657, 0.1902073323726654, 0.2752552032470703, 0.006866676267236471, 0.26389604806900024, 1.1790437698364258, -0.8003261685371399, 0.4120701551437378, 0.09092450141906738, 0.022151431068778038, 0.3376340866088867, -0.16471372544765472, 0.47140708565711975, 0.09332704544067383, -0.02104709856212139, -0.14969943463802338, 0.31588491797447205, -0.8889451622962952, -0.05642741173505783, 0.909813404083252, -0.928460419178009, -0.5978940725326538, -0.8978675603866577, -0.5604167580604553, 0.061041031032800674, 0.5877066850662231, 0.3461449146270752, 0.5354640483856201, -0.010097923688590527, 0.4194318950176239, 0.8636763095855713, -0.11369267851114273, 0.6147603392601013, 0.23464655876159668, 0.044732868671417236, -0.6185946464538574, 0.8110110759735107, 0.09032110124826431, 0.3567068576812744, 0.223810613155365, 0.4148326516151428, -0.5824488401412964, -0.1974627822637558, -0.22550001740455627, 0.4754732549190521, -0.6397218704223633, -0.25933966040611267, -0.35418063402175903, -0.4102659821510315, -0.7790158987045288, -0.6339249610900879, -0.3164633810520172, -0.5168017148971558, -0.47168925404548645, -0.4926728308200836, 0.5322877168655396, 0.4490954279899597, -0.3433517813682556, 0.008639652281999588, -0.4964558184146881, 0.25267884135246277, 0.3632317781448364, 0.5810632109642029, -0.40571334958076477, -0.5637062191963196, 0.06245683506131172, -0.1343664824962616, -0.5218086242675781, -0.9299870729446411, 0.3382532298564911, -0.04657487943768501, 0.5246342420578003, 0.5786306262016296, 0.10748331248760223, 0.8198001980781555, -0.21631111204624176, 1.071259617805481, 0.3151908814907074, -0.8049488663673401, 0.7516497373580933, -0.3299156129360199, 0.1734183430671692, 0.646624743938446, 0.14125163853168488, -0.17527425289154053, -0.6843602061271667, -1.3457473516464233, -0.793608546257019, 0.6660822629928589, 0.4161136746406555, -0.198378786444664, 0.035325001925230026, 0.11687152087688446, -0.2625999450683594, -0.17362923920154572, -0.6420746445655823, -0.8985186815261841, -0.14988349378108978, -0.5393155813217163, 0.10276543349027634, 0.021865736693143845, -0.4245355725288391, -0.8733762502670288, 0.9172245860099792, 0.037069398909807205, 0.5457360744476318, 0.4776708483695984, 0.08500030636787415, 0.04773150756955147, 0.5225576162338257, 0.9788704514503479, 0.7763030529022217, -0.4616973102092743, 0.4623565673828125, 0.40152299404144287, -1.072525978088379, 0.5147206783294678, 0.3380396068096161, -0.1049172505736351, -0.008110075257718563, 0.48342034220695496, 0.3965747654438019, 0.0652090534567833, -0.24066945910453796, 0.6387830376625061, -0.017084868624806404, -0.583998441696167, -0.39301300048828125, 0.13631729781627655, -0.14846423268318176, -0.022427907213568687, 0.43246543407440186, -0.17205452919006348, 
-0.06019522249698639, -0.49052807688713074, 0.5179710984230042, 0.3679056465625763, -0.486235111951828, -0.1575637012720108, 0.7140222191810608, -0.22359777987003326, -0.18844182789325714, 0.3183867037296295, -0.20295383036136627, -0.6690493226051331, 1.1738237142562866, 0.6335834860801697, 0.6746028661727905, -0.24366390705108643, -0.04413478076457977, 0.9197676777839661, 0.39058393239974976, -0.022925356402993202, 0.5292007923126221, 0.3444652259349823, -0.25026512145996094, 0.1972782015800476, -0.8534156084060669, -0.06956014782190323, 0.1388721615076065, -0.81257164478302, 0.3077198565006256, -0.49515822529792786, -0.1718149483203888, -0.021228807047009468, 0.4284224510192871, -0.464754581451416, 0.5904757976531982, -0.40276655554771423, 1.227803111076355, -0.9749413728713989, 0.7335156798362732, 0.7596756219863892, -0.5266105532646179, -1.0531648397445679, -0.5319926142692566, -0.0068784733302891254, -0.7734931707382202, 0.6249919533729553, -0.0640636458992958, 0.18584395945072174, -0.030067801475524902, -0.744331955909729, -0.9112454652786255, 1.3678292036056519, -0.10405497997999191, -0.42661052942276, 0.23830659687519073, -0.06293535232543945, 0.47458022832870483, 0.13242654502391815, 0.5451831221580505, 0.7414047718048096, 0.8255555629730225, -0.10470365732908249, -0.706034779548645, 0.3531067967414856, -0.5365699529647827, -0.35095930099487305, 0.45779943466186523, -0.8674116730690002, 1.2350512742996216, -0.011506530456244946, 0.1797136664390564, -0.2349586933851242, 0.5942932367324829, 0.8064246773719788, 0.3435463607311249, 0.3699226677417755, 0.8768422603607178, 0.8270133137702942, -0.5014687180519104, 1.0323741436004639, -0.21045702695846558, 0.9032194018363953, 0.7355464696884155, 0.23872721195220947, 0.7433838248252869, 0.7046998739242554, -0.6010686755180359, 0.5286548137664795, 0.8081331849098206, -0.31169411540031433, 0.39241892099380493, 0.28845638036727905, -0.11868578940629959, -0.13331641256809235, 0.4463702440261841, -0.8854217529296875, 0.07412642240524292, 0.05670076608657837, -0.3261498510837555, 0.06041475757956505, -0.4298861622810364, 0.3107962906360626, -0.08687662333250046, -0.023514412343502045, 0.3380865752696991, 0.03456227108836174, -0.4028489887714386, 0.9222041368484497, -0.20586508512496948, 0.7471399903297424, -0.5544139742851257, -0.09012876451015472, -0.37650948762893677, 0.5883521437644958, -0.41008374094963074, -1.0497568845748901, 0.18013101816177368, 0.05615438148379326, -0.12475503236055374, -0.1810133308172226, 0.7434237003326416, -0.211595818400383, -0.7826811075210571, 0.13854792714118958, 0.10709301382303238, 0.04554443433880806, 0.5356059074401855, -0.6697638034820557, -0.33775508403778076, -0.09711465984582901, -0.5922903418540955, 0.10670839250087738, 0.2634478509426117, 0.2998252809047699, 0.5653252601623535, 0.6617695093154907, 0.1726505011320114, 0.45624881982803345, -0.5388551950454712, 0.8145508766174316, -1.0433748960494995, -0.7181912064552307, -0.9305453300476074, 0.4707881212234497, -0.323637992143631, -0.8950523734092712, 1.0291557312011719, 1.0394185781478882, 0.9271816611289978, -0.0248929001390934, 0.6327876448631287, -0.39823266863822937, 0.2636237144470215, -0.36210551857948303, 0.9582460522651672, -0.8444181084632874, -0.2630747854709625, -0.25498872995376587, -0.6922438144683838, -0.4268428385257721, 0.8177176713943481, -0.16763164103031158, 0.016113201156258583, 1.0724239349365234, 0.6289951205253601, -0.10775771737098694, 0.04008421301841736, -0.03110952489078045, 0.5527458786964417, 0.3707596957683563, 
0.9769389033317566, 0.6530478596687317, -0.8162243366241455, 0.3364937901496887, -0.5379331111907959, -0.37601611018180847, -0.3909417390823364, -0.43659737706184387, -0.8478702902793884, -0.47663116455078125, -0.21903401613235474, -0.61400306224823, -0.10600695759057999, 1.0063889026641846, 0.4603157639503479, -0.9346091747283936, -0.35627302527427673, -0.10644400119781494, 0.19717906415462494, -0.5887064337730408, -0.42309027910232544, 0.7276942133903503, -0.14258424937725067, -0.5381721258163452, 0.17309367656707764, -0.15846948325634003, 0.23882272839546204, 0.11833035945892334, -0.42390745878219604, -0.7468481063842773, 0.05528957396745682, 0.45057785511016846, 0.3121887147426605, -0.7056626677513123, -0.7361329197883606, 0.34040912985801697, -0.5930010080337524, 0.4309599697589874, -0.03099239617586136, -0.5190767645835876, 0.05583265423774719, 0.695929229259491, 0.4596816897392273, 0.6596519947052002, -0.02512669563293457, 0.024699386209249496, -0.6611188054084778, 0.17591194808483124, 0.03643154352903366, 0.23941735923290253, -0.06809631735086441, -0.2960547208786011, 0.7902209162712097, 0.6724040508270264, -0.49888622760772705, -1.1053813695907593, -0.4248887896537781, -1.4081286191940308, -0.04954107478260994, 1.1081215143203735, 0.005721217021346092, -0.49706006050109863, 0.21138282120227814, -0.12347754836082458, 0.2162594050168991, -0.2829889953136444, 0.7838357090950012, 0.8271689414978027, -0.3721979558467865, 0.11452921479940414, -0.6569764614105225, 0.36199846863746643, 0.5319753289222717, -1.213958501815796, -0.08257092535495758, 0.23325851559638977, 0.3265223205089569, 0.32943975925445557, 0.6463598012924194, -0.1115240678191185, 0.2512471675872803, 0.281882107257843, 0.03154795616865158, -0.022417977452278137, 0.05898984894156456, -0.23431262373924255, 0.08866146206855774, -0.28953516483306885, -0.45247817039489746 ]
open-llm-leaderboard/details_Kiddyz__testlm2
open-llm-leaderboard
2023-08-27T12:38:57Z
201
0
[ "region:us" ]
null
2023-08-18T11:59:18Z
--- pretty_name: Evaluation run of Kiddyz/testlm2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Kiddyz/testlm2](https://huggingface.co/Kiddyz/testlm2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kiddyz__testlm2\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-17T13:24:31.212024](https://huggingface.co/datasets/open-llm-leaderboard/details_Kiddyz__testlm2/blob/main/results_2023-08-17T13%3A24%3A31.212024.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5159377671021478,\n\ \ \"acc_stderr\": 0.03487459607154835,\n \"acc_norm\": 0.519591954426014,\n\ \ \"acc_norm_stderr\": 0.03486307795106439,\n \"mc1\": 0.3268053855569155,\n\ \ \"mc1_stderr\": 0.01641987473113503,\n \"mc2\": 0.4867717626441099,\n\ \ \"mc2_stderr\": 0.015479620334955967\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.49829351535836175,\n \"acc_stderr\": 0.014611305705056987,\n\ \ \"acc_norm\": 0.5298634812286689,\n \"acc_norm_stderr\": 0.0145853058400071\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5723959370643298,\n\ \ \"acc_stderr\": 0.004937199759947679,\n \"acc_norm\": 0.7564230233021311,\n\ \ \"acc_norm_stderr\": 0.004283630516444485\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n\ \ \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n\ \ \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04068942293855797,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04068942293855797\n },\n\ \ \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \ \ \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\"\ : 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"\ acc\": 0.5584905660377358,\n \"acc_stderr\": 0.030561590426731833,\n \ \ \"acc_norm\": 0.5584905660377358,\n \"acc_norm_stderr\": 0.030561590426731833\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5347222222222222,\n\ \ \"acc_stderr\": 0.04171115858181618,\n \"acc_norm\": 0.5347222222222222,\n\ \ \"acc_norm_stderr\": 0.04171115858181618\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \ \ \"acc_norm\": 0.42,\n 
\"acc_norm_stderr\": 0.049604496374885836\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\"\ : 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4508670520231214,\n\ \ \"acc_stderr\": 0.03794012674697029,\n \"acc_norm\": 0.4508670520231214,\n\ \ \"acc_norm_stderr\": 0.03794012674697029\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.039505818611799616,\n\ \ \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.039505818611799616\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n\ \ \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4297872340425532,\n \"acc_stderr\": 0.03236214467715563,\n\ \ \"acc_norm\": 0.4297872340425532,\n \"acc_norm_stderr\": 0.03236214467715563\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n\ \ \"acc_stderr\": 0.043727482902780064,\n \"acc_norm\": 0.3157894736842105,\n\ \ \"acc_norm_stderr\": 0.043727482902780064\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.041546596717075474,\n\ \ \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.041546596717075474\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.291005291005291,\n \"acc_stderr\": 0.023393826500484875,\n \"\ acc_norm\": 0.291005291005291,\n \"acc_norm_stderr\": 0.023393826500484875\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n\ \ \"acc_stderr\": 0.04216370213557835,\n \"acc_norm\": 0.3333333333333333,\n\ \ \"acc_norm_stderr\": 0.04216370213557835\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5741935483870968,\n\ \ \"acc_stderr\": 0.028129112709165904,\n \"acc_norm\": 0.5741935483870968,\n\ \ \"acc_norm_stderr\": 0.028129112709165904\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.3842364532019704,\n \"acc_stderr\": 0.0342239856565755,\n\ \ \"acc_norm\": 0.3842364532019704,\n \"acc_norm_stderr\": 0.0342239856565755\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\"\ : 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.036639749943912434,\n\ \ \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.036639749943912434\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6515151515151515,\n \"acc_stderr\": 0.03394853965156402,\n \"\ acc_norm\": 0.6515151515151515,\n \"acc_norm_stderr\": 0.03394853965156402\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7564766839378239,\n \"acc_stderr\": 0.030975436386845426,\n\ \ \"acc_norm\": 0.7564766839378239,\n \"acc_norm_stderr\": 
0.030975436386845426\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4794871794871795,\n \"acc_stderr\": 0.025329663163489943,\n\ \ \"acc_norm\": 0.4794871794871795,\n \"acc_norm_stderr\": 0.025329663163489943\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \ \ \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.48739495798319327,\n \"acc_stderr\": 0.032468167657521745,\n\ \ \"acc_norm\": 0.48739495798319327,\n \"acc_norm_stderr\": 0.032468167657521745\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"\ acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.6880733944954128,\n \"acc_stderr\": 0.019862967976707245,\n \"\ acc_norm\": 0.6880733944954128,\n \"acc_norm_stderr\": 0.019862967976707245\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"\ acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7401960784313726,\n \"acc_stderr\": 0.030778554678693247,\n \"\ acc_norm\": 0.7401960784313726,\n \"acc_norm_stderr\": 0.030778554678693247\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7215189873417721,\n \"acc_stderr\": 0.029178682304842548,\n \ \ \"acc_norm\": 0.7215189873417721,\n \"acc_norm_stderr\": 0.029178682304842548\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6367713004484304,\n\ \ \"acc_stderr\": 0.03227790442850499,\n \"acc_norm\": 0.6367713004484304,\n\ \ \"acc_norm_stderr\": 0.03227790442850499\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6106870229007634,\n \"acc_stderr\": 0.04276486542814591,\n\ \ \"acc_norm\": 0.6106870229007634,\n \"acc_norm_stderr\": 0.04276486542814591\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6942148760330579,\n \"acc_stderr\": 0.042059539338841226,\n \"\ acc_norm\": 0.6942148760330579,\n \"acc_norm_stderr\": 0.042059539338841226\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5833333333333334,\n\ \ \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.5833333333333334,\n\ \ \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5828220858895705,\n \"acc_stderr\": 0.0387410285981808,\n\ \ \"acc_norm\": 0.5828220858895705,\n \"acc_norm_stderr\": 0.0387410285981808\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n\ \ \"acc_stderr\": 0.04521829902833585,\n \"acc_norm\": 0.3482142857142857,\n\ \ \"acc_norm_stderr\": 0.04521829902833585\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n\ \ \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n\ \ \"acc_stderr\": 0.027778835904935434,\n \"acc_norm\": 0.7649572649572649,\n\ \ \"acc_norm_stderr\": 0.027778835904935434\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.55,\n 
\"acc_stderr\": 0.049999999999999996,\n \ \ \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7100893997445722,\n\ \ \"acc_stderr\": 0.01622501794477097,\n \"acc_norm\": 0.7100893997445722,\n\ \ \"acc_norm_stderr\": 0.01622501794477097\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5375722543352601,\n \"acc_stderr\": 0.026842985519615375,\n\ \ \"acc_norm\": 0.5375722543352601,\n \"acc_norm_stderr\": 0.026842985519615375\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2927374301675978,\n\ \ \"acc_stderr\": 0.015218109544410184,\n \"acc_norm\": 0.2927374301675978,\n\ \ \"acc_norm_stderr\": 0.015218109544410184\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.028452639985088006,\n\ \ \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.028452639985088006\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5980707395498392,\n\ \ \"acc_stderr\": 0.027846476005930473,\n \"acc_norm\": 0.5980707395498392,\n\ \ \"acc_norm_stderr\": 0.027846476005930473\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.027586006221607708,\n\ \ \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.027586006221607708\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4078014184397163,\n \"acc_stderr\": 0.02931601177634356,\n \ \ \"acc_norm\": 0.4078014184397163,\n \"acc_norm_stderr\": 0.02931601177634356\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3898305084745763,\n\ \ \"acc_stderr\": 0.012456386619082604,\n \"acc_norm\": 0.3898305084745763,\n\ \ \"acc_norm_stderr\": 0.012456386619082604\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.45955882352941174,\n \"acc_stderr\": 0.03027332507734576,\n\ \ \"acc_norm\": 0.45955882352941174,\n \"acc_norm_stderr\": 0.03027332507734576\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5016339869281046,\n \"acc_stderr\": 0.020227726838150124,\n \ \ \"acc_norm\": 0.5016339869281046,\n \"acc_norm_stderr\": 0.020227726838150124\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n\ \ \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n\ \ \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6285714285714286,\n \"acc_stderr\": 0.03093285879278985,\n\ \ \"acc_norm\": 0.6285714285714286,\n \"acc_norm_stderr\": 0.03093285879278985\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6915422885572139,\n\ \ \"acc_stderr\": 0.03265819588512697,\n \"acc_norm\": 0.6915422885572139,\n\ \ \"acc_norm_stderr\": 0.03265819588512697\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \ \ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n\ \ \"acc_stderr\": 0.038367221765980515,\n \"acc_norm\": 0.41566265060240964,\n\ \ \"acc_norm_stderr\": 0.038367221765980515\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7076023391812866,\n \"acc_stderr\": 0.03488647713457922,\n\ \ \"acc_norm\": 0.7076023391812866,\n \"acc_norm_stderr\": 0.03488647713457922\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3268053855569155,\n\ \ \"mc1_stderr\": 
0.01641987473113503,\n \"mc2\": 0.4867717626441099,\n\ \ \"mc2_stderr\": 0.015479620334955967\n }\n}\n```" repo_url: https://huggingface.co/Kiddyz/testlm2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|arc:challenge|25_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hellaswag|10_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:24:31.212024.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:24:31.212024.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:24:31.212024.parquet' - 
'**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T13:24:31.212024.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:24:31.212024.parquet' - config_name: 
harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:24:31.212024.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_17T13_24_31.212024 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T13:24:31.212024.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T13:24:31.212024.parquet' - config_name: results data_files: - split: 2023_08_17T13_24_31.212024 path: - results_2023-08-17T13:24:31.212024.parquet - split: latest path: - results_2023-08-17T13:24:31.212024.parquet --- # Dataset Card for Evaluation run of Kiddyz/testlm2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Kiddyz/testlm2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Kiddyz/testlm2](https://huggingface.co/Kiddyz/testlm2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kiddyz__testlm2", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T13:24:31.212024](https://huggingface.co/datasets/open-llm-leaderboard/details_Kiddyz__testlm2/blob/main/results_2023-08-17T13%3A24%3A31.212024.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5159377671021478, "acc_stderr": 0.03487459607154835, "acc_norm": 0.519591954426014, "acc_norm_stderr": 0.03486307795106439, "mc1": 0.3268053855569155, "mc1_stderr": 0.01641987473113503, "mc2": 0.4867717626441099, "mc2_stderr": 0.015479620334955967 }, "harness|arc:challenge|25": { "acc": 0.49829351535836175, "acc_stderr": 0.014611305705056987, "acc_norm": 0.5298634812286689, "acc_norm_stderr": 0.0145853058400071 }, "harness|hellaswag|10": { "acc": 0.5723959370643298, "acc_stderr": 0.004937199759947679, "acc_norm": 0.7564230233021311, "acc_norm_stderr": 0.004283630516444485 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5, "acc_stderr": 0.04068942293855797, "acc_norm": 0.5, "acc_norm_stderr": 0.04068942293855797 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5584905660377358, "acc_stderr": 0.030561590426731833, "acc_norm": 0.5584905660377358, "acc_norm_stderr": 0.030561590426731833 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5347222222222222, "acc_stderr": 0.04171115858181618, "acc_norm": 0.5347222222222222, "acc_norm_stderr": 0.04171115858181618 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4508670520231214, "acc_stderr": 0.03794012674697029, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.03794012674697029 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.039505818611799616, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.039505818611799616 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4297872340425532, "acc_stderr": 0.03236214467715563, "acc_norm": 0.4297872340425532, "acc_norm_stderr": 0.03236214467715563 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.043727482902780064, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.043727482902780064 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.041546596717075474, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.041546596717075474 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.291005291005291, "acc_stderr": 0.023393826500484875, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484875 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 
0.04216370213557835 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5741935483870968, "acc_stderr": 0.028129112709165904, "acc_norm": 0.5741935483870968, "acc_norm_stderr": 0.028129112709165904 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3842364532019704, "acc_stderr": 0.0342239856565755, "acc_norm": 0.3842364532019704, "acc_norm_stderr": 0.0342239856565755 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6727272727272727, "acc_stderr": 0.036639749943912434, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.036639749943912434 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6515151515151515, "acc_stderr": 0.03394853965156402, "acc_norm": 0.6515151515151515, "acc_norm_stderr": 0.03394853965156402 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7564766839378239, "acc_stderr": 0.030975436386845426, "acc_norm": 0.7564766839378239, "acc_norm_stderr": 0.030975436386845426 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4794871794871795, "acc_stderr": 0.025329663163489943, "acc_norm": 0.4794871794871795, "acc_norm_stderr": 0.025329663163489943 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.48739495798319327, "acc_stderr": 0.032468167657521745, "acc_norm": 0.48739495798319327, "acc_norm_stderr": 0.032468167657521745 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6880733944954128, "acc_stderr": 0.019862967976707245, "acc_norm": 0.6880733944954128, "acc_norm_stderr": 0.019862967976707245 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7401960784313726, "acc_stderr": 0.030778554678693247, "acc_norm": 0.7401960784313726, "acc_norm_stderr": 0.030778554678693247 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7215189873417721, "acc_stderr": 0.029178682304842548, "acc_norm": 0.7215189873417721, "acc_norm_stderr": 0.029178682304842548 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6367713004484304, "acc_stderr": 0.03227790442850499, "acc_norm": 0.6367713004484304, "acc_norm_stderr": 0.03227790442850499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6106870229007634, "acc_stderr": 0.04276486542814591, "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.04276486542814591 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6942148760330579, "acc_stderr": 0.042059539338841226, "acc_norm": 0.6942148760330579, "acc_norm_stderr": 0.042059539338841226 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04766075165356461 }, 
"harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5828220858895705, "acc_stderr": 0.0387410285981808, "acc_norm": 0.5828220858895705, "acc_norm_stderr": 0.0387410285981808 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833585, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833585 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935434, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935434 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7100893997445722, "acc_stderr": 0.01622501794477097, "acc_norm": 0.7100893997445722, "acc_norm_stderr": 0.01622501794477097 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5375722543352601, "acc_stderr": 0.026842985519615375, "acc_norm": 0.5375722543352601, "acc_norm_stderr": 0.026842985519615375 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2927374301675978, "acc_stderr": 0.015218109544410184, "acc_norm": 0.2927374301675978, "acc_norm_stderr": 0.015218109544410184 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5555555555555556, "acc_stderr": 0.028452639985088006, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.028452639985088006 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5980707395498392, "acc_stderr": 0.027846476005930473, "acc_norm": 0.5980707395498392, "acc_norm_stderr": 0.027846476005930473 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5648148148148148, "acc_stderr": 0.027586006221607708, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.027586006221607708 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4078014184397163, "acc_stderr": 0.02931601177634356, "acc_norm": 0.4078014184397163, "acc_norm_stderr": 0.02931601177634356 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3898305084745763, "acc_stderr": 0.012456386619082604, "acc_norm": 0.3898305084745763, "acc_norm_stderr": 0.012456386619082604 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.45955882352941174, "acc_stderr": 0.03027332507734576, "acc_norm": 0.45955882352941174, "acc_norm_stderr": 0.03027332507734576 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5016339869281046, "acc_stderr": 0.020227726838150124, "acc_norm": 0.5016339869281046, "acc_norm_stderr": 0.020227726838150124 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6285714285714286, "acc_stderr": 0.03093285879278985, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.03093285879278985 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6915422885572139, "acc_stderr": 0.03265819588512697, "acc_norm": 0.6915422885572139, "acc_norm_stderr": 0.03265819588512697 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 
0.038367221765980515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7076023391812866, "acc_stderr": 0.03488647713457922, "acc_norm": 0.7076023391812866, "acc_norm_stderr": 0.03488647713457922 }, "harness|truthfulqa:mc|0": { "mc1": 0.3268053855569155, "mc1_stderr": 0.01641987473113503, "mc2": 0.4867717626441099, "mc2_stderr": 0.015479620334955967 } } ```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
[ -0.704179048538208, -0.8531977534294128, 0.2434178739786148, 0.20569108426570892, -0.16208742558956146, -0.047295987606048584, 0.04269736260175705, -0.2409629225730896, 0.4986087679862976, -0.10419712215662003, -0.5111563801765442, -0.6834871768951416, -0.4521249234676361, 0.1979350745677948, -0.04884939640760422, 0.8254392743110657, -0.18129034340381622, -0.1294613927602768, 0.06304557621479034, 0.015748925507068634, -0.26362451910972595, -0.34923601150512695, -0.49561169743537903, -0.3523087203502655, 0.1640571653842926, 0.455526202917099, 0.4596232771873474, 0.7955244183540344, 0.6410688757896423, 0.27088621258735657, -0.3316059708595276, -0.011014404706656933, -0.16090746223926544, -0.27362918853759766, 0.39533647894859314, -0.33641719818115234, -0.8225275278091431, 0.3156290054321289, 0.7614783048629761, 0.6200205087661743, -0.07964739948511124, 0.30654099583625793, 0.035625748336315155, 0.5728610157966614, -0.3744719922542572, 0.03983701393008232, -0.2735982835292816, 0.24637646973133087, -0.17094901204109192, -0.24777062237262726, -0.3314957916736603, -0.23777222633361816, -0.14643530547618866, -0.8421464562416077, 0.2837506830692291, 0.32120490074157715, 1.5501961708068848, -0.17200705409049988, -0.2773682177066803, 0.09469576179981232, -0.11980282515287399, 1.0200059413909912, -0.8883551955223083, 0.36062079668045044, 0.7907199859619141, 0.12358307838439941, -0.1630096435546875, -0.5858974456787109, -0.6448332667350769, 0.09140925109386444, -0.40380096435546875, 0.3562485873699188, -0.04800274595618248, -0.19886384904384613, 0.3892328441143036, 0.6152130365371704, -0.7081301212310791, 0.15426777303218842, -0.6376640200614929, -0.16214661300182343, 1.114732265472412, 0.3559872508049011, 0.10267002135515213, -0.3482101261615753, -0.715979814529419, -0.6606181263923645, -0.41454243659973145, 0.30153438448905945, 0.4364732503890991, 0.36564216017723083, -0.3933921158313751, 0.6943843364715576, -0.3971008062362671, 0.5620695948600769, 0.38221538066864014, 0.03972337394952774, 0.8900140523910522, -0.6821792125701904, -0.5361778140068054, -0.06161947175860405, 1.0740857124328613, 0.5709429979324341, 0.07539916038513184, 0.23177683353424072, 0.00647174334153533, -0.11172312498092651, 0.03313492611050606, -0.8614455461502075, -0.3095528781414032, 0.14694344997406006, -0.3993084132671356, -0.48138827085494995, 0.3721790909767151, -0.8945361375808716, 0.16467434167861938, -0.048591248691082, 0.4282267093658447, -0.47770383954048157, -0.11664513498544693, 0.21525999903678894, -0.41415342688560486, 0.8441309928894043, -0.18249095976352692, -0.8093572854995728, 0.4250832498073578, 0.5252252817153931, 0.7462431788444519, -0.11215093731880188, -0.4490448832511902, -0.11263268440961838, -0.11296934634447098, -0.32275712490081787, 0.5295217633247375, -0.26415902376174927, -0.430704265832901, -0.25047826766967773, 0.2890796661376953, -0.3039093315601349, -0.3530636727809906, 0.7248297333717346, -0.24103675782680511, 0.24342772364616394, -0.4182821214199066, -0.6467099785804749, 0.16366061568260193, 0.38200899958610535, -0.3752115070819855, 1.3647950887680054, 0.30094364285469055, -0.802039623260498, 0.42743611335754395, -0.5643414855003357, -0.20306815207004547, -0.03499671071767807, -0.021662002429366112, -0.7993468642234802, -0.26835396885871887, 0.13767392933368683, 0.3565119802951813, -0.11217613518238068, -0.10775148123502731, -0.3699752688407898, -0.37781673669815063, 0.3510819673538208, -0.15761590003967285, 1.240440011024475, -0.045933645218610764, -0.7797695994377136, 
-0.09157709032297134, -1.2198200225830078, 0.359950989484787, 0.2163587063550949, -0.36434003710746765, -0.1945369690656662, -0.4664348363876343, 0.003656675573438406, 0.25302091240882874, 0.2552652359008789, -0.7954887747764587, 0.28032565116882324, -0.37935009598731995, 0.14351709187030792, 1.253021240234375, -0.003186497138813138, 0.13077926635742188, -0.5659686923027039, 0.5090024471282959, 0.19074711203575134, 0.24332714080810547, 0.41964659094810486, -0.6330490708351135, -0.8020408153533936, -0.4571380913257599, -0.055711980909109116, 0.6101987361907959, -0.18145902454853058, 1.139340877532959, 0.07453608512878418, -0.8773000836372375, -0.4034661650657654, -0.1301843672990799, 0.5341004729270935, 0.8260185122489929, 0.6193273663520813, -0.014216122217476368, -0.6295653581619263, -1.1252497434616089, -0.2825705111026764, -0.19704580307006836, 0.11413807421922684, 0.23389145731925964, 1.047752022743225, -0.2555761933326721, 0.6071357727050781, -1.0375643968582153, -0.18054480850696564, 0.16149930655956268, -0.08822908252477646, 0.7631873488426208, 0.7565727829933167, 0.5841516256332397, -0.7000033259391785, -0.5553203821182251, 0.13974320888519287, -0.8306789398193359, -0.08974524587392807, 0.1514030545949936, -0.34036269783973694, 0.1400514394044876, 0.10919863730669022, -0.7139050960540771, 0.5442972183227539, 0.2227449119091034, -1.1482043266296387, 1.0875686407089233, -0.33188000321388245, 0.5489749908447266, -0.9715470671653748, 0.18165065348148346, -0.09755248576402664, 0.03858932852745056, -0.5269591808319092, 0.024577053263783455, 0.13958469033241272, 0.4661441147327423, -0.5172743201255798, 0.8343783020973206, -0.6740050911903381, -0.08318966627120972, 0.46409305930137634, 0.15124747157096863, -0.09801091998815536, 0.34159332513809204, -0.26140058040618896, 0.8388774991035461, 0.7623440623283386, -0.4670504927635193, 0.525428295135498, 0.4352026879787445, -0.1781804859638214, 0.689185619354248, -0.47726330161094666, -0.2979212701320648, 0.3289426565170288, -0.055560726672410965, -0.8680319786071777, -0.48933717608451843, 0.055578406900167465, -0.6110742688179016, -0.07368707656860352, 0.38570159673690796, -0.29806551337242126, -0.8415290713310242, -0.9772722125053406, 0.31979209184646606, 0.7791525721549988, -0.4369717240333557, -0.20445393025875092, 0.05784013122320175, 0.07795291393995285, -0.8105619549751282, -0.8491848111152649, -0.4891708493232727, -0.18960356712341309, -0.7117813229560852, 0.33324334025382996, -0.2760697305202484, -0.27927249670028687, -0.06453302502632141, -0.24464461207389832, -0.34509727358818054, -0.0011525515001267195, 0.13552474975585938, 0.7105013728141785, -0.41438162326812744, -0.2476564198732376, -0.2235165238380432, -0.15368343889713287, 0.2749243676662445, -0.09061384946107864, 0.3699570894241333, -0.44196149706840515, -0.40756338834762573, -0.3898725211620331, -0.014194406569004059, 0.6989352107048035, -0.04731607809662819, 0.7296095490455627, 0.4089958369731903, -0.32687604427337646, -0.05980663001537323, -0.26216620206832886, -0.28158846497535706, -0.5778939127922058, 0.26266130805015564, -0.47764769196510315, -1.0495092868804932, 0.7470313906669617, 0.5664908289909363, 0.04998596012592316, 1.1557888984680176, 0.6062803864479065, -0.3246386647224426, 1.0496604442596436, 0.024239432066679, 0.3852728605270386, 0.3759470582008362, -0.6647711396217346, 0.10469400882720947, -0.9478848576545715, -0.3633895516395569, -0.5963049530982971, -0.4813792109489441, -0.676458477973938, -0.07961489260196686, 0.2295134961605072, 0.16415563225746155, 
-0.7071347832679749, 0.5642959475517273, -0.8295385837554932, 0.5596377849578857, 0.5815197229385376, 0.2488989681005478, 0.14170381426811218, -0.16686256229877472, -0.40071338415145874, -0.12044094502925873, -0.47882336378097534, -0.25666049122810364, 1.2207026481628418, 0.25611287355422974, 0.677273690700531, 0.11119005084037781, 0.8610100150108337, 0.12617847323417664, -0.05186589062213898, -0.5929592251777649, 0.6390997171401978, 0.1030501127243042, -0.8249571323394775, -0.4341983199119568, -0.4785515069961548, -1.0716853141784668, 0.3995640277862549, -0.13112837076187134, -0.8593447208404541, 0.10036604106426239, 0.018387073650956154, -0.19528184831142426, 0.47985410690307617, -0.5484451055526733, 0.826606035232544, -0.12297174334526062, -0.49596649408340454, 0.11777637153863907, -0.8627405166625977, 0.480905681848526, 0.1867835819721222, 0.27826911211013794, 0.03725247457623482, 0.24610409140586853, 1.1894043684005737, -0.820482075214386, 0.41236600279808044, 0.07013482600450516, 0.034290820360183716, 0.35265663266181946, -0.17179742455482483, 0.5113316178321838, 0.09621285647153854, -0.010825234465301037, -0.16043700277805328, 0.3081902265548706, -0.8759512901306152, -0.07126092165708542, 0.9205081462860107, -0.9558977484703064, -0.5982809662818909, -0.9007347226142883, -0.550287663936615, 0.03581630811095238, 0.5692997574806213, 0.3385719954967499, 0.5371752381324768, 0.018600551411509514, 0.43556082248687744, 0.8610666394233704, -0.11946077644824982, 0.6396109461784363, 0.23069973289966583, 0.07880669087171555, -0.6302118897438049, 0.813999593257904, 0.07927951961755753, 0.37312060594558716, 0.23982693254947662, 0.4065837860107422, -0.5991513133049011, -0.21656344830989838, -0.24676916003227234, 0.497341126203537, -0.6241313219070435, -0.2597925662994385, -0.3619363605976105, -0.4131099581718445, -0.7986809611320496, -0.6262131333351135, -0.2981937825679779, -0.5116573572158813, -0.4764811396598816, -0.49434635043144226, 0.530626654624939, 0.46130749583244324, -0.36708754301071167, 0.004716058727353811, -0.49795112013816833, 0.26565930247306824, 0.3793543875217438, 0.5752214789390564, -0.38069063425064087, -0.5892684459686279, 0.06496760994195938, -0.11716759204864502, -0.543376088142395, -0.93471360206604, 0.31745144724845886, -0.040759120136499405, 0.5196155905723572, 0.5590133666992188, 0.09594015032052994, 0.8198449015617371, -0.21111680567264557, 1.091293215751648, 0.33309364318847656, -0.7914608120918274, 0.745307445526123, -0.3205319941043854, 0.1477634310722351, 0.6371076107025146, 0.1250060647726059, -0.1853753626346588, -0.659755289554596, -1.330593466758728, -0.7986321449279785, 0.6801508069038391, 0.4199739396572113, -0.19393225014209747, 0.054299209266901016, 0.13008162379264832, -0.302977979183197, -0.20201584696769714, -0.6369081735610962, -0.8889686465263367, -0.140290305018425, -0.517950177192688, 0.10334674268960953, -0.03214847669005394, -0.42346733808517456, -0.8404209017753601, 0.9300033450126648, 0.02835996448993683, 0.5660800933837891, 0.463894248008728, 0.07050915062427521, 0.05313294380903244, 0.5050784349441528, 0.9867090582847595, 0.7566200494766235, -0.4519003927707672, 0.46176719665527344, 0.40428170561790466, -1.0770184993743896, 0.5011652708053589, 0.3302297294139862, -0.1167534664273262, -0.013415449298918247, 0.49926742911338806, 0.4040713608264923, 0.052764516323804855, -0.23949240148067474, 0.6631512641906738, -0.012081975117325783, -0.577383279800415, -0.39729684591293335, 0.13241678476333618, -0.128691628575325, -0.019315548241138458, 
0.4155057370662689, -0.17183205485343933, -0.05253390595316887, -0.5083165764808655, 0.5107156038284302, 0.3639836013317108, -0.4966665208339691, -0.14941224455833435, 0.7240356802940369, -0.20865441858768463, -0.18625904619693756, 0.33819329738616943, -0.1796131730079651, -0.6475818753242493, 1.1781673431396484, 0.647145688533783, 0.6686496138572693, -0.2492467612028122, -0.05533677712082863, 0.9273322224617004, 0.3686167001724243, -0.043866682797670364, 0.5268526673316956, 0.33226478099823, -0.23145809769630432, 0.21171092987060547, -0.8563297986984253, -0.04213045537471771, 0.13130538165569305, -0.8169609308242798, 0.30295538902282715, -0.5032945275306702, -0.18441201746463776, -0.02600345015525818, 0.4258369505405426, -0.45169466733932495, 0.5899200439453125, -0.397795170545578, 1.2239251136779785, -0.9795618057250977, 0.7291591763496399, 0.7526246905326843, -0.5090889930725098, -1.076797366142273, -0.5230709910392761, 0.024628981947898865, -0.7858428359031677, 0.6243400573730469, -0.05211815983057022, 0.1736402064561844, -0.05289704352617264, -0.7017136812210083, -0.9181314706802368, 1.389715313911438, -0.12261056154966354, -0.4247110188007355, 0.23673692345619202, -0.060237348079681396, 0.47859978675842285, 0.14711539447307587, 0.578301191329956, 0.7617518305778503, 0.8194896578788757, -0.0886848121881485, -0.7166406512260437, 0.35304516553878784, -0.5340566039085388, -0.32471612095832825, 0.450809121131897, -0.8889747262001038, 1.2282416820526123, -0.023167071864008904, 0.18747587502002716, -0.21832478046417236, 0.6045097708702087, 0.8138145208358765, 0.35044732689857483, 0.3533932566642761, 0.8671659827232361, 0.8211620450019836, -0.51346755027771, 1.0262095928192139, -0.21553167700767517, 0.9007289409637451, 0.741065502166748, 0.23711954057216644, 0.7682352066040039, 0.6816884279251099, -0.5936998724937439, 0.5353813171386719, 0.821895182132721, -0.3243664801120758, 0.3892599642276764, 0.2897699773311615, -0.11088036000728607, -0.12104912102222443, 0.42950019240379333, -0.8965339660644531, 0.08366775512695312, 0.055055171251297, -0.35146409273147583, 0.08701519668102264, -0.45192354917526245, 0.32421332597732544, -0.08340121805667877, -0.007412017788738012, 0.3528580069541931, 0.02782588079571724, -0.42958804965019226, 0.9278998970985413, -0.16828610002994537, 0.7397743463516235, -0.5209356546401978, -0.0893474668264389, -0.39301151037216187, 0.5814389586448669, -0.4311206340789795, -1.0740841627120972, 0.18085679411888123, 0.08201007544994354, -0.13707178831100464, -0.1865120828151703, 0.7252019643783569, -0.21308864653110504, -0.7806083559989929, 0.15395140647888184, 0.08493069559335709, 0.0524057112634182, 0.5489375591278076, -0.6708070635795593, -0.3441072702407837, -0.09236738830804825, -0.5919284224510193, 0.12695801258087158, 0.2681829035282135, 0.2756100296974182, 0.5460119843482971, 0.6676664352416992, 0.17774827778339386, 0.43845680356025696, -0.5254788398742676, 0.811652421951294, -1.028210163116455, -0.7149640321731567, -0.9419915080070496, 0.4843640923500061, -0.34596988558769226, -0.8803864121437073, 1.0076210498809814, 1.0646604299545288, 0.905586302280426, -0.011668222025036812, 0.6338649988174438, -0.39336520433425903, 0.25856274366378784, -0.3729024827480316, 0.9586076140403748, -0.8511877059936523, -0.2619974613189697, -0.23853707313537598, -0.7144566178321838, -0.41738829016685486, 0.807179868221283, -0.17339873313903809, 0.0210643969476223, 1.0593549013137817, 0.6393517851829529, -0.09909198433160782, 0.02487318404018879, -0.05226144567131996, 
0.5566281676292419, 0.39945119619369507, 0.9919427037239075, 0.6598175764083862, -0.7943602800369263, 0.3452950417995453, -0.5298583507537842, -0.4122339189052582, -0.41863152384757996, -0.41311317682266235, -0.8620432615280151, -0.47319233417510986, -0.20691797137260437, -0.6214382648468018, -0.10232105851173401, 0.9927276372909546, 0.4658667743206024, -0.9447485208511353, -0.3652949631214142, -0.10242736339569092, 0.1701948046684265, -0.5847108960151672, -0.42423251271247864, 0.7556747198104858, -0.1378077268600464, -0.5378499031066895, 0.1791621595621109, -0.1599240005016327, 0.23741360008716583, 0.14257843792438507, -0.419575959444046, -0.7202104926109314, 0.05138378590345383, 0.47453513741493225, 0.3147309124469757, -0.7047706842422485, -0.7478657364845276, 0.3173970878124237, -0.6023293137550354, 0.41661903262138367, -0.02134847454726696, -0.5059452652931213, 0.058478232473134995, 0.7272708415985107, 0.4597560465335846, 0.6599475145339966, 0.0031598873902112246, 0.02759185992181301, -0.6492317318916321, 0.17890363931655884, 0.02584008313715458, 0.24058912694454193, -0.06247727945446968, -0.30921536684036255, 0.7772078514099121, 0.693426787853241, -0.4984127879142761, -1.1020734310150146, -0.4173632860183716, -1.405161738395691, -0.06217540428042412, 1.1471821069717407, 0.00443182373419404, -0.461786687374115, 0.20584213733673096, -0.1364484280347824, 0.2212832272052765, -0.29193001985549927, 0.7868986129760742, 0.7998433113098145, -0.3929148018360138, 0.11392553895711899, -0.6668432354927063, 0.38072022795677185, 0.5127047896385193, -1.2132809162139893, -0.07230453193187714, 0.24975526332855225, 0.32563748955726624, 0.3099222183227539, 0.6234291791915894, -0.1140303984284401, 0.26052728295326233, 0.2606083154678345, 0.022711466997861862, -0.026012472808361053, 0.04823434352874756, -0.21339672803878784, 0.09950952231884003, -0.2718169391155243, -0.44738537073135376 ]
open-llm-leaderboard/details_Kiddyz__testlm-1-1
open-llm-leaderboard
2023-08-27T12:38:58Z
201
0
[ "region:us" ]
null
2023-08-18T11:59:26Z
--- pretty_name: Evaluation run of Kiddyz/testlm-1-1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Kiddyz/testlm-1-1](https://huggingface.co/Kiddyz/testlm-1-1) on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kiddyz__testlm-1-1\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-16T10:49:10.911062](https://huggingface.co/datasets/open-llm-leaderboard/details_Kiddyz__testlm-1-1/blob/main/results_2023-08-16T10%3A49%3A10.911062.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5128834307003443,\n\ \ \"acc_stderr\": 0.03501260490290392,\n \"acc_norm\": 0.5166256154161327,\n\ \ \"acc_norm_stderr\": 0.03500071412093006,\n \"mc1\": 0.32802937576499386,\n\ \ \"mc1_stderr\": 0.01643563293281503,\n \"mc2\": 0.48413168566081527,\n\ \ \"mc2_stderr\": 0.015167638286466481\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5017064846416383,\n \"acc_stderr\": 0.014611305705056992,\n\ \ \"acc_norm\": 0.5349829351535836,\n \"acc_norm_stderr\": 0.014575583922019669\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5705038836885082,\n\ \ \"acc_stderr\": 0.004939925958728884,\n \"acc_norm\": 0.758016331408086,\n\ \ \"acc_norm_stderr\": 0.004274091605308121\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n\ \ \"acc_stderr\": 0.04313531696750573,\n \"acc_norm\": 0.4740740740740741,\n\ \ \"acc_norm_stderr\": 0.04313531696750573\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5131578947368421,\n \"acc_stderr\": 0.04067533136309174,\n\ \ \"acc_norm\": 0.5131578947368421,\n \"acc_norm_stderr\": 0.04067533136309174\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5433962264150943,\n \"acc_stderr\": 0.03065674869673943,\n\ \ \"acc_norm\": 0.5433962264150943,\n \"acc_norm_stderr\": 0.03065674869673943\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5555555555555556,\n\ \ \"acc_stderr\": 0.041553199555931467,\n \"acc_norm\": 0.5555555555555556,\n\ \ \"acc_norm_stderr\": 0.041553199555931467\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n 
\"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"\ acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4624277456647399,\n\ \ \"acc_stderr\": 0.0380168510452446,\n \"acc_norm\": 0.4624277456647399,\n\ \ \"acc_norm_stderr\": 0.0380168510452446\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617747,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617747\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n\ \ \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.03227834510146267,\n\ \ \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.03227834510146267\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n\ \ \"acc_stderr\": 0.04303684033537314,\n \"acc_norm\": 0.2982456140350877,\n\ \ \"acc_norm_stderr\": 0.04303684033537314\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.496551724137931,\n \"acc_stderr\": 0.041665675771015785,\n\ \ \"acc_norm\": 0.496551724137931,\n \"acc_norm_stderr\": 0.041665675771015785\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3333333333333333,\n \"acc_stderr\": 0.0242785680243077,\n \"acc_norm\"\ : 0.3333333333333333,\n \"acc_norm_stderr\": 0.0242785680243077\n },\n\ \ \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\ \ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\ \ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.5903225806451613,\n \"acc_stderr\": 0.027976054915347368,\n \"\ acc_norm\": 0.5903225806451613,\n \"acc_norm_stderr\": 0.027976054915347368\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.35960591133004927,\n \"acc_stderr\": 0.033764582465095665,\n \"\ acc_norm\": 0.35960591133004927,\n \"acc_norm_stderr\": 0.033764582465095665\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\"\ : 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n\ \ \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6262626262626263,\n \"acc_stderr\": 0.03446897738659333,\n \"\ acc_norm\": 0.6262626262626263,\n \"acc_norm_stderr\": 0.03446897738659333\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7202072538860104,\n \"acc_stderr\": 0.03239637046735704,\n\ \ 
\"acc_norm\": 0.7202072538860104,\n \"acc_norm_stderr\": 0.03239637046735704\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.49743589743589745,\n \"acc_stderr\": 0.025350672979412202,\n\ \ \"acc_norm\": 0.49743589743589745,\n \"acc_norm_stderr\": 0.025350672979412202\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073838,\n \ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073838\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5210084033613446,\n \"acc_stderr\": 0.03244980849990029,\n \ \ \"acc_norm\": 0.5210084033613446,\n \"acc_norm_stderr\": 0.03244980849990029\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"\ acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7119266055045872,\n \"acc_stderr\": 0.01941644589263603,\n \"\ acc_norm\": 0.7119266055045872,\n \"acc_norm_stderr\": 0.01941644589263603\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.44907407407407407,\n \"acc_stderr\": 0.03392238405321616,\n \"\ acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.03392238405321616\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7156862745098039,\n \"acc_stderr\": 0.03166009679399813,\n \"\ acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.03166009679399813\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7088607594936709,\n \"acc_stderr\": 0.02957160106575337,\n \ \ \"acc_norm\": 0.7088607594936709,\n \"acc_norm_stderr\": 0.02957160106575337\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5919282511210763,\n\ \ \"acc_stderr\": 0.03298574607842822,\n \"acc_norm\": 0.5919282511210763,\n\ \ \"acc_norm_stderr\": 0.03298574607842822\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262972,\n\ \ \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262972\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6528925619834711,\n \"acc_stderr\": 0.043457245702925335,\n \"\ acc_norm\": 0.6528925619834711,\n \"acc_norm_stderr\": 0.043457245702925335\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5833333333333334,\n\ \ \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.5833333333333334,\n\ \ \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5705521472392638,\n \"acc_stderr\": 0.03889066619112722,\n\ \ \"acc_norm\": 0.5705521472392638,\n \"acc_norm_stderr\": 0.03889066619112722\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n\ \ \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n\ \ \"acc_stderr\": 0.027778835904935434,\n \"acc_norm\": 0.7649572649572649,\n\ \ \"acc_norm_stderr\": 0.027778835904935434\n },\n 
\"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7088122605363985,\n\ \ \"acc_stderr\": 0.0162460870697014,\n \"acc_norm\": 0.7088122605363985,\n\ \ \"acc_norm_stderr\": 0.0162460870697014\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5173410404624278,\n \"acc_stderr\": 0.026902900458666647,\n\ \ \"acc_norm\": 0.5173410404624278,\n \"acc_norm_stderr\": 0.026902900458666647\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.29720670391061454,\n\ \ \"acc_stderr\": 0.015285313353641602,\n \"acc_norm\": 0.29720670391061454,\n\ \ \"acc_norm_stderr\": 0.015285313353641602\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.028452639985088006,\n\ \ \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.028452639985088006\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6045016077170418,\n\ \ \"acc_stderr\": 0.027770918531427838,\n \"acc_norm\": 0.6045016077170418,\n\ \ \"acc_norm_stderr\": 0.027770918531427838\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5709876543209876,\n \"acc_stderr\": 0.027538925613470863,\n\ \ \"acc_norm\": 0.5709876543209876,\n \"acc_norm_stderr\": 0.027538925613470863\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3971631205673759,\n \"acc_stderr\": 0.0291898056735871,\n \ \ \"acc_norm\": 0.3971631205673759,\n \"acc_norm_stderr\": 0.0291898056735871\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3754889178617992,\n\ \ \"acc_stderr\": 0.012367945396728208,\n \"acc_norm\": 0.3754889178617992,\n\ \ \"acc_norm_stderr\": 0.012367945396728208\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4852941176470588,\n \"acc_stderr\": 0.03035969707904611,\n\ \ \"acc_norm\": 0.4852941176470588,\n \"acc_norm_stderr\": 0.03035969707904611\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.49836601307189543,\n \"acc_stderr\": 0.020227726838150124,\n \ \ \"acc_norm\": 0.49836601307189543,\n \"acc_norm_stderr\": 0.020227726838150124\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n\ \ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \ \ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6081632653061224,\n \"acc_stderr\": 0.031251275910891656,\n\ \ \"acc_norm\": 0.6081632653061224,\n \"acc_norm_stderr\": 0.031251275910891656\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6716417910447762,\n\ \ \"acc_stderr\": 0.033206858897443244,\n \"acc_norm\": 0.6716417910447762,\n\ \ \"acc_norm_stderr\": 0.033206858897443244\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3795180722891566,\n\ \ \"acc_stderr\": 0.03777798822748018,\n \"acc_norm\": 0.3795180722891566,\n\ \ \"acc_norm_stderr\": 0.03777798822748018\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.03565079670708311,\n\ \ \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.03565079670708311\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 
0.32802937576499386,\n\ \ \"mc1_stderr\": 0.01643563293281503,\n \"mc2\": 0.48413168566081527,\n\ \ \"mc2_stderr\": 0.015167638286466481\n }\n}\n```" repo_url: https://huggingface.co/Kiddyz/testlm-1-1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|arc:challenge|25_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hellaswag|10_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T10:49:10.911062.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T10:49:10.911062.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T10:49:10.911062.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-16T10:49:10.911062.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-management|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-16T10:49:10.911062.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-16T10:49:10.911062.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_16T10_49_10.911062 path: - '**/details_harness|truthfulqa:mc|0_2023-08-16T10:49:10.911062.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-16T10:49:10.911062.parquet' - config_name: results data_files: - split: 2023_08_16T10_49_10.911062 path: - results_2023-08-16T10:49:10.911062.parquet - split: latest path: - results_2023-08-16T10:49:10.911062.parquet --- # Dataset Card for Evaluation run of Kiddyz/testlm-1-1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Kiddyz/testlm-1-1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Kiddyz/testlm-1-1](https://huggingface.co/Kiddyz/testlm-1-1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kiddyz__testlm-1-1", "harness_truthfulqa_mc_0", split="train") ``` A further sketch showing how to read the aggregated "results" configuration is given just after this card. ## Latest results These are the [latest results from run 2023-08-16T10:49:10.911062](https://huggingface.co/datasets/open-llm-leaderboard/details_Kiddyz__testlm-1-1/blob/main/results_2023-08-16T10%3A49%3A10.911062.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5128834307003443, "acc_stderr": 0.03501260490290392, "acc_norm": 0.5166256154161327, "acc_norm_stderr": 0.03500071412093006, "mc1": 0.32802937576499386, "mc1_stderr": 0.01643563293281503, "mc2": 0.48413168566081527, "mc2_stderr": 0.015167638286466481 }, "harness|arc:challenge|25": { "acc": 0.5017064846416383, "acc_stderr": 0.014611305705056992, "acc_norm": 0.5349829351535836, "acc_norm_stderr": 0.014575583922019669 }, "harness|hellaswag|10": { "acc": 0.5705038836885082, "acc_stderr": 0.004939925958728884, "acc_norm": 0.758016331408086, "acc_norm_stderr": 0.004274091605308121 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750573, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750573 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5131578947368421, "acc_stderr": 0.04067533136309174, "acc_norm": 0.5131578947368421, "acc_norm_stderr": 0.04067533136309174 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5433962264150943, "acc_stderr": 0.03065674869673943, "acc_norm": 0.5433962264150943, "acc_norm_stderr": 0.03065674869673943 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5555555555555556, "acc_stderr": 0.041553199555931467, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.041553199555931467 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4624277456647399, "acc_stderr": 0.0380168510452446, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.0380168510452446 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146267, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537314, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537314 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.041665675771015785, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.041665675771015785 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, 
"acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5903225806451613, "acc_stderr": 0.027976054915347368, "acc_norm": 0.5903225806451613, "acc_norm_stderr": 0.027976054915347368 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.033764582465095665, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.033764582465095665 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6262626262626263, "acc_stderr": 0.03446897738659333, "acc_norm": 0.6262626262626263, "acc_norm_stderr": 0.03446897738659333 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7202072538860104, "acc_stderr": 0.03239637046735704, "acc_norm": 0.7202072538860104, "acc_norm_stderr": 0.03239637046735704 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.49743589743589745, "acc_stderr": 0.025350672979412202, "acc_norm": 0.49743589743589745, "acc_norm_stderr": 0.025350672979412202 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073838, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073838 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5210084033613446, "acc_stderr": 0.03244980849990029, "acc_norm": 0.5210084033613446, "acc_norm_stderr": 0.03244980849990029 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7119266055045872, "acc_stderr": 0.01941644589263603, "acc_norm": 0.7119266055045872, "acc_norm_stderr": 0.01941644589263603 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.03392238405321616, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.03392238405321616 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7156862745098039, "acc_stderr": 0.03166009679399813, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.03166009679399813 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7088607594936709, "acc_stderr": 0.02957160106575337, "acc_norm": 0.7088607594936709, "acc_norm_stderr": 0.02957160106575337 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5919282511210763, "acc_stderr": 0.03298574607842822, "acc_norm": 0.5919282511210763, "acc_norm_stderr": 0.03298574607842822 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262972, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262972 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.043457245702925335, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.043457245702925335 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 
0.04766075165356461 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5705521472392638, "acc_stderr": 0.03889066619112722, "acc_norm": 0.5705521472392638, "acc_norm_stderr": 0.03889066619112722 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935434, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935434 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7088122605363985, "acc_stderr": 0.0162460870697014, "acc_norm": 0.7088122605363985, "acc_norm_stderr": 0.0162460870697014 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5173410404624278, "acc_stderr": 0.026902900458666647, "acc_norm": 0.5173410404624278, "acc_norm_stderr": 0.026902900458666647 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.29720670391061454, "acc_stderr": 0.015285313353641602, "acc_norm": 0.29720670391061454, "acc_norm_stderr": 0.015285313353641602 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5555555555555556, "acc_stderr": 0.028452639985088006, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.028452639985088006 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6045016077170418, "acc_stderr": 0.027770918531427838, "acc_norm": 0.6045016077170418, "acc_norm_stderr": 0.027770918531427838 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5709876543209876, "acc_stderr": 0.027538925613470863, "acc_norm": 0.5709876543209876, "acc_norm_stderr": 0.027538925613470863 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3971631205673759, "acc_stderr": 0.0291898056735871, "acc_norm": 0.3971631205673759, "acc_norm_stderr": 0.0291898056735871 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3754889178617992, "acc_stderr": 0.012367945396728208, "acc_norm": 0.3754889178617992, "acc_norm_stderr": 0.012367945396728208 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4852941176470588, "acc_stderr": 0.03035969707904611, "acc_norm": 0.4852941176470588, "acc_norm_stderr": 0.03035969707904611 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.49836601307189543, "acc_stderr": 0.020227726838150124, "acc_norm": 0.49836601307189543, "acc_norm_stderr": 0.020227726838150124 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6081632653061224, "acc_stderr": 0.031251275910891656, "acc_norm": 0.6081632653061224, "acc_norm_stderr": 0.031251275910891656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6716417910447762, "acc_stderr": 0.033206858897443244, "acc_norm": 0.6716417910447762, "acc_norm_stderr": 0.033206858897443244 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-virology|5": { "acc": 0.3795180722891566, "acc_stderr": 0.03777798822748018, "acc_norm": 0.3795180722891566, "acc_norm_stderr": 
0.03777798822748018 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6842105263157895, "acc_stderr": 0.03565079670708311, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.03565079670708311 }, "harness|truthfulqa:mc|0": { "mc1": 0.32802937576499386, "mc1_stderr": 0.01643563293281503, "mc2": 0.48413168566081527, "mc2_stderr": 0.015167638286466481 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
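The card above describes, besides the per-task configurations, a "results" configuration holding the aggregated metrics, with a "latest" split pointing at the most recent run. As a minimal sketch (assuming the repository is reachable; the exact column layout of the aggregated parquet is not documented here, so the last line only inspects whatever fields are present), the same `datasets` API shown in the card can list the configurations and read that aggregated split:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Kiddyz__testlm-1-1"

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations, e.g. {configs[:3]}")

# The "latest" split of the "results" configuration mirrors the JSON block above.
aggregated = load_dataset(repo, "results", split="latest")
print(aggregated.column_names)  # schema depends on what the evaluation harness stored
```

The same pattern applies to any per-task configuration, for instance swapping "results" for "harness_hendrycksTest_world_religions_5" listed in the YAML above.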
[ -0.7137205004692078, -0.8578280210494995, 0.23187191784381866, 0.1924835592508316, -0.18340763449668884, -0.06229124218225479, 0.05015521124005318, -0.2056218683719635, 0.5306110382080078, -0.09665476530790329, -0.551737368106842, -0.7114219665527344, -0.44707342982292175, 0.2158719301223755, -0.060825444757938385, 0.8415750861167908, -0.16120365262031555, -0.13063204288482666, 0.0639161542057991, 0.00421016663312912, -0.20677931606769562, -0.35826781392097473, -0.4990164339542389, -0.3391497731208801, 0.19866003096103668, 0.44888266921043396, 0.4452720284461975, 0.7893467545509338, 0.6431177258491516, 0.27567726373672485, -0.3265538513660431, -0.03680051118135452, -0.17317801713943481, -0.2750674784183502, 0.4000864028930664, -0.32926446199417114, -0.8430080413818359, 0.32282137870788574, 0.7507052421569824, 0.6333566308021545, -0.09746436029672623, 0.3285936415195465, 0.01601293683052063, 0.5796857476234436, -0.35342225432395935, 0.03541645035147667, -0.2796521782875061, 0.25456705689430237, -0.17499642074108124, -0.24131394922733307, -0.30456456542015076, -0.25248467922210693, -0.13116614520549774, -0.8634656667709351, 0.27998730540275574, 0.3349384367465973, 1.5601849555969238, -0.14149650931358337, -0.23775792121887207, 0.08847061544656754, -0.10717690736055374, 1.0218130350112915, -0.9033064842224121, 0.35976117849349976, 0.7952884435653687, 0.11139912903308868, -0.17097190022468567, -0.5799292922019958, -0.6584153175354004, 0.08011572062969208, -0.3857404589653015, 0.35101112723350525, -0.0480523444712162, -0.19983060657978058, 0.3888378441333771, 0.6325752139091492, -0.6983177065849304, 0.1418442279100418, -0.635446310043335, -0.15367916226387024, 1.089473009109497, 0.3595033884048462, 0.10165806859731674, -0.3562450408935547, -0.7023423314094543, -0.6561875343322754, -0.4156017601490021, 0.2888685166835785, 0.4439675211906433, 0.4033762812614441, -0.3881491422653198, 0.6777483820915222, -0.4114310145378113, 0.5565028786659241, 0.3819238841533661, 0.013253643177449703, 0.8905513286590576, -0.7028759717941284, -0.5581247806549072, -0.03919842094182968, 1.083787202835083, 0.5232941508293152, 0.08337006717920303, 0.23856626451015472, 0.021160880103707314, -0.09598004817962646, 0.04600736126303673, -0.8391464352607727, -0.2964754104614258, 0.14705577492713928, -0.3782923221588135, -0.49163299798965454, 0.3574090898036957, -0.8786664009094238, 0.16537536680698395, -0.028486741706728935, 0.4298289716243744, -0.49658700823783875, -0.11142697930335999, 0.25404128432273865, -0.392953485250473, 0.8340251445770264, -0.17903797328472137, -0.7884471416473389, 0.43249252438545227, 0.5070528984069824, 0.7230789065361023, -0.08413347601890564, -0.4470065236091614, -0.09328284859657288, -0.14647553861141205, -0.31201326847076416, 0.5122314095497131, -0.24343498051166534, -0.42198386788368225, -0.28121036291122437, 0.2771960198879242, -0.3061150014400482, -0.33172279596328735, 0.7424812316894531, -0.22218477725982666, 0.2498328685760498, -0.41116249561309814, -0.6554403305053711, 0.14300376176834106, 0.41670456528663635, -0.3999350070953369, 1.3308720588684082, 0.30222612619400024, -0.8118406534194946, 0.4472945034503937, -0.5723416209220886, -0.20034970343112946, -0.025151830166578293, -0.012824083678424358, -0.8097508549690247, -0.2889299690723419, 0.14935638010501862, 0.3667561709880829, -0.11097529530525208, -0.11822770535945892, -0.36857473850250244, -0.3612343668937683, 0.3416743874549866, -0.16127991676330566, 1.2486222982406616, -0.034959834069013596, -0.741731584072113, 
-0.08556368947029114, -1.2291241884231567, 0.3549346625804901, 0.2241683006286621, -0.3455420434474945, -0.1910877376794815, -0.45831164717674255, -0.011319664306938648, 0.23236198723316193, 0.2533583343029022, -0.8033758997917175, 0.28708887100219727, -0.37137922644615173, 0.12318521738052368, 1.255211353302002, 0.009661360643804073, 0.11340105533599854, -0.5621406435966492, 0.5498978495597839, 0.19544483721256256, 0.25571975111961365, 0.3864208161830902, -0.6014172434806824, -0.7976096272468567, -0.47671738266944885, -0.0389394573867321, 0.590249240398407, -0.1685108244419098, 1.142215371131897, 0.05612289905548096, -0.8885654211044312, -0.42080092430114746, -0.12467688322067261, 0.5231632590293884, 0.8301665782928467, 0.5930212140083313, -0.009030871093273163, -0.6466633081436157, -1.114160180091858, -0.2714322507381439, -0.20472022891044617, 0.14306417107582092, 0.20977461338043213, 1.0414825677871704, -0.23780973255634308, 0.6118063926696777, -1.0147323608398438, -0.19186300039291382, 0.1652301847934723, -0.10796868056058884, 0.7861704230308533, 0.759065568447113, 0.5738078951835632, -0.6973315477371216, -0.5573170185089111, 0.14330115914344788, -0.8284419775009155, -0.07651243358850479, 0.17301686108112335, -0.3143883943557739, 0.13314451277256012, 0.09474995732307434, -0.7016125321388245, 0.5403512716293335, 0.21507717669010162, -1.1418321132659912, 1.0821529626846313, -0.3079218864440918, 0.5526574850082397, -0.9673950672149658, 0.16480660438537598, -0.09527774155139923, 0.032656989991664886, -0.526063084602356, 0.017837366089224815, 0.10344860702753067, 0.47347766160964966, -0.5372075438499451, 0.8371579051017761, -0.6333813071250916, -0.046432238072156906, 0.5058910250663757, 0.13728198409080505, -0.10846707224845886, 0.3428412675857544, -0.3051327168941498, 0.8244338035583496, 0.7673853635787964, -0.47134721279144287, 0.5102688074111938, 0.44487082958221436, -0.19132211804389954, 0.6627146005630493, -0.4687403440475464, -0.31012633442878723, 0.3353918790817261, -0.06955038011074066, -0.8625028133392334, -0.47864171862602234, 0.08272479474544525, -0.6010226607322693, -0.10149598121643066, 0.386880099773407, -0.318676620721817, -0.8392743468284607, -0.9533191919326782, 0.31292980909347534, 0.770226001739502, -0.41993460059165955, -0.2021600306034088, 0.06052672117948532, 0.07892918586730957, -0.8097894191741943, -0.8271965980529785, -0.5061747431755066, -0.20272493362426758, -0.6984477043151855, 0.3014675974845886, -0.27609750628471375, -0.289937824010849, -0.07649221271276474, -0.2799147069454193, -0.3493879437446594, 0.0066460538655519485, 0.12971298396587372, 0.7042959928512573, -0.4182114601135254, -0.25090882182121277, -0.2339496910572052, -0.1502579301595688, 0.2675503194332123, -0.08669358491897583, 0.36218294501304626, -0.4675599932670593, -0.40633073449134827, -0.36491402983665466, -0.03139442577958107, 0.7093023061752319, -0.0410785973072052, 0.7080875039100647, 0.39388522505760193, -0.3291913866996765, -0.056103307753801346, -0.2537866234779358, -0.27281326055526733, -0.578429639339447, 0.2479376494884491, -0.4840776324272156, -1.027355432510376, 0.7783323526382446, 0.5623061656951904, 0.01293499581515789, 1.13228440284729, 0.586426854133606, -0.30223727226257324, 1.0354390144348145, 0.03425431624054909, 0.3602791130542755, 0.38640254735946655, -0.6567831039428711, 0.12217887490987778, -0.9139621257781982, -0.34456637501716614, -0.5911874771118164, -0.4716685116291046, -0.6766286492347717, -0.03762085363268852, 0.24769815802574158, 0.1636519730091095, 
-0.6993632316589355, 0.5665457844734192, -0.8304045796394348, 0.5816764831542969, 0.5777138471603394, 0.2699768841266632, 0.16520898044109344, -0.18298065662384033, -0.42356547713279724, -0.11952415108680725, -0.5150068402290344, -0.2730962932109833, 1.2198206186294556, 0.2502094507217407, 0.7230306267738342, 0.0648714154958725, 0.8870735764503479, 0.10256467759609222, -0.03431755304336548, -0.5731174349784851, 0.6546621918678284, 0.09370116144418716, -0.8284953236579895, -0.44825828075408936, -0.469821572303772, -1.0710766315460205, 0.4004761576652527, -0.13470035791397095, -0.8398095965385437, 0.0846831277012825, 0.004922203719615936, -0.20888713002204895, 0.46562379598617554, -0.5606259703636169, 0.8566587567329407, -0.12647123634815216, -0.48909786343574524, 0.13215923309326172, -0.8466930985450745, 0.4636484384536743, 0.18892404437065125, 0.27449384331703186, 0.002884361892938614, 0.26006802916526794, 1.1815580129623413, -0.8016815185546875, 0.41723188757896423, 0.09109941124916077, 0.02158759906888008, 0.33909884095191956, -0.16516700387001038, 0.47306737303733826, 0.09137976914644241, -0.021266313269734383, -0.14560292661190033, 0.3142867386341095, -0.881833553314209, -0.057655904442071915, 0.9126543998718262, -0.9282792210578918, -0.5989118218421936, -0.8942751288414001, -0.5579712390899658, 0.06500286608934402, 0.5851542353630066, 0.3445969223976135, 0.5316783785820007, -0.009034634567797184, 0.4180401563644409, 0.8577104806900024, -0.1139117106795311, 0.6168570518493652, 0.23828940093517303, 0.04038108512759209, -0.6188840866088867, 0.8050739169120789, 0.09387949854135513, 0.3535459041595459, 0.22065311670303345, 0.4140618145465851, -0.5778300166130066, -0.19923637807369232, -0.2247965931892395, 0.47651761770248413, -0.6418090462684631, -0.2619756758213043, -0.3520869016647339, -0.41135433316230774, -0.7765665650367737, -0.6335158348083496, -0.31848806142807007, -0.5205824375152588, -0.47196412086486816, -0.4930371344089508, 0.535724937915802, 0.45281922817230225, -0.34430113434791565, 0.008284595794975758, -0.49406561255455017, 0.2507493495941162, 0.3593786060810089, 0.580924928188324, -0.40644538402557373, -0.5652644038200378, 0.06068140268325806, -0.13749773800373077, -0.5211387872695923, -0.9297060966491699, 0.3439806401729584, -0.04604304954409599, 0.5266865491867065, 0.5758021473884583, 0.11034467816352844, 0.8191297650337219, -0.21782323718070984, 1.0731537342071533, 0.31524133682250977, -0.8029345870018005, 0.7521371245384216, -0.33221474289894104, 0.16859595477581024, 0.6427057981491089, 0.14274632930755615, -0.172998309135437, -0.6809560060501099, -1.345153570175171, -0.7986559867858887, 0.6668134331703186, 0.41622215509414673, -0.1999039202928543, 0.034476522356271744, 0.12010166794061661, -0.2574118971824646, -0.17408539354801178, -0.6474153399467468, -0.9028380513191223, -0.14815166592597961, -0.5379126071929932, 0.10390739887952805, 0.02012583240866661, -0.4249604642391205, -0.8739748597145081, 0.9187663793563843, 0.036152128130197525, 0.5480908155441284, 0.47778546810150146, 0.08705539256334305, 0.04563552513718605, 0.5220845341682434, 0.9787281155586243, 0.7780168056488037, -0.46320387721061707, 0.4578231871128082, 0.4029093384742737, -1.0660326480865479, 0.512542188167572, 0.3394191265106201, -0.1018914133310318, -0.012553065083920956, 0.48670709133148193, 0.3954118490219116, 0.0681810975074768, -0.24230825901031494, 0.6369888186454773, -0.011363114230334759, -0.5828222036361694, -0.39602580666542053, 0.13819043338298798, -0.14891833066940308, 
-0.018488449975848198, 0.4354444742202759, -0.17440949380397797, -0.06127500906586647, -0.48676303029060364, 0.5179466009140015, 0.3701780438423157, -0.48026832938194275, -0.1584460288286209, 0.7132991552352905, -0.2266152799129486, -0.18976663053035736, 0.3190882205963135, -0.20370256900787354, -0.6712691187858582, 1.1734644174575806, 0.6283020973205566, 0.6743929982185364, -0.238305002450943, -0.045622825622558594, 0.9187853336334229, 0.39079707860946655, -0.02541874162852764, 0.5251957178115845, 0.34369033575057983, -0.25595682859420776, 0.19543014466762543, -0.853398859500885, -0.07001490890979767, 0.14215850830078125, -0.8091171383857727, 0.3116058111190796, -0.4935420751571655, -0.17133034765720367, -0.020209889858961105, 0.4299536943435669, -0.4653175175189972, 0.5850195288658142, -0.4011095464229584, 1.225394606590271, -0.9714592695236206, 0.7303199172019958, 0.7593971490859985, -0.5280753970146179, -1.0506442785263062, -0.5321394205093384, -0.005386775825172663, -0.776350200176239, 0.6249635219573975, -0.06663718074560165, 0.1843055635690689, -0.02832457236945629, -0.7520735263824463, -0.9131894707679749, 1.3704849481582642, -0.10137680172920227, -0.4270547032356262, 0.2370343804359436, -0.06683676689863205, 0.4721930921077728, 0.1285340040922165, 0.5411933064460754, 0.7408739328384399, 0.8237638473510742, -0.10623631626367569, -0.7112777233123779, 0.3526322841644287, -0.5370259881019592, -0.355049729347229, 0.4531150460243225, -0.8678551912307739, 1.234878420829773, -0.01413741149008274, 0.17979861795902252, -0.23116043210029602, 0.5911402702331543, 0.8070407509803772, 0.34127387404441833, 0.37051206827163696, 0.8756356835365295, 0.8242279887199402, -0.5014520287513733, 1.035839319229126, -0.2146686613559723, 0.9058321118354797, 0.7369788885116577, 0.23986858129501343, 0.7411558032035828, 0.7016541957855225, -0.6050390601158142, 0.5338362455368042, 0.8086278438568115, -0.30963245034217834, 0.39598241448402405, 0.2874399721622467, -0.121639683842659, -0.13287699222564697, 0.4453990161418915, -0.8815174102783203, 0.0732247605919838, 0.05676427483558655, -0.32596129179000854, 0.062039270997047424, -0.4308915138244629, 0.3122975528240204, -0.08843398094177246, -0.025466419756412506, 0.3400570750236511, 0.035372890532016754, -0.4052664339542389, 0.9229011535644531, -0.20445674657821655, 0.7477056980133057, -0.5536074638366699, -0.0863703042268753, -0.3767542243003845, 0.5867878794670105, -0.4086020588874817, -1.0488495826721191, 0.1767113208770752, 0.057191673666238785, -0.1279412806034088, -0.18152156472206116, 0.7425810098648071, -0.20826628804206848, -0.7821177244186401, 0.139390230178833, 0.11088244616985321, 0.04752873629331589, 0.5336658358573914, -0.6705463528633118, -0.33794164657592773, -0.09501722455024719, -0.59755939245224, 0.10657739639282227, 0.26306384801864624, 0.29830503463745117, 0.5708392262458801, 0.664239764213562, 0.17033296823501587, 0.4586362838745117, -0.5398296117782593, 0.8162562251091003, -1.042129635810852, -0.7170441746711731, -0.9341109395027161, 0.47320330142974854, -0.3190736770629883, -0.8914394974708557, 1.0265141725540161, 1.035999059677124, 0.9249923229217529, -0.02652071602642536, 0.6327483057975769, -0.4001142978668213, 0.2643700838088989, -0.3643074035644531, 0.9550230503082275, -0.842199981212616, -0.2527763843536377, -0.25204795598983765, -0.6867620348930359, -0.4252777695655823, 0.8181760311126709, -0.16720180213451385, 0.016456905752420425, 1.072332739830017, 0.6327531337738037, -0.10291240364313126, 0.03673647716641426, 
-0.031857412308454514, 0.5527811050415039, 0.36658546328544617, 0.9742169380187988, 0.6540812253952026, -0.8141655921936035, 0.3352617621421814, -0.5427185297012329, -0.3742789328098297, -0.3884096145629883, -0.4409964382648468, -0.8462570309638977, -0.477313369512558, -0.22161224484443665, -0.6167707443237305, -0.10794492810964584, 1.0135204792022705, 0.4633394777774811, -0.9346953630447388, -0.35571590065956116, -0.10720239579677582, 0.2024790197610855, -0.5861636400222778, -0.4226098358631134, 0.7235614657402039, -0.14350983500480652, -0.5387526750564575, 0.17752201855182648, -0.15688501298427582, 0.23469410836696625, 0.11769359558820724, -0.4238247871398926, -0.7492371201515198, 0.0550055131316185, 0.4509800374507904, 0.3093464970588684, -0.7091204524040222, -0.7323218584060669, 0.34035488963127136, -0.5903603434562683, 0.43366169929504395, -0.028775827959179878, -0.5203201174736023, 0.052870042622089386, 0.6926460862159729, 0.45294609665870667, 0.6602790355682373, -0.02303854003548622, 0.022177858278155327, -0.6649979948997498, 0.17246127128601074, 0.031832337379455566, 0.23997479677200317, -0.06670329719781876, -0.2997168004512787, 0.799411952495575, 0.6738898158073425, -0.4995477497577667, -1.1028220653533936, -0.42397791147232056, -1.4129388332366943, -0.04958079382777214, 1.1095629930496216, 0.0034727968741208315, -0.4959014058113098, 0.2144019901752472, -0.12064671516418457, 0.21828188002109528, -0.28561171889305115, 0.7800723314285278, 0.8266454935073853, -0.37745407223701477, 0.1084400936961174, -0.6552093625068665, 0.35902127623558044, 0.5270860195159912, -1.2111880779266357, -0.08636011928319931, 0.23376524448394775, 0.330972820520401, 0.32989081740379333, 0.6494783759117126, -0.10995621979236603, 0.24682597815990448, 0.27905622124671936, 0.03310534358024597, -0.021205434575676918, 0.0583764985203743, -0.23204928636550903, 0.09004121273756027, -0.288369745016098, -0.449279248714447 ]
open-llm-leaderboard/details_gywy__llama2-13b-chinese-v1
open-llm-leaderboard
2023-08-27T12:39:03Z
201
0
[ "region:us" ]
null
2023-08-18T11:59:52Z
--- pretty_name: Evaluation run of gywy/llama2-13b-chinese-v1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [gywy/llama2-13b-chinese-v1](https://huggingface.co/gywy/llama2-13b-chinese-v1)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_gywy__llama2-13b-chinese-v1\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-26T15:10:00.921624](https://huggingface.co/datasets/open-llm-leaderboard/details_gywy__llama2-13b-chinese-v1/blob/main/results_2023-07-26T15%3A10%3A00.921624.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5420814148370803,\n\ \ \"acc_stderr\": 0.03472894201222865,\n \"acc_norm\": 0.5463875639849175,\n\ \ \"acc_norm_stderr\": 0.03471430598894899,\n \"mc1\": 0.3182374541003672,\n\ \ \"mc1_stderr\": 0.016305988648920612,\n \"mc2\": 0.45724154700953135,\n\ \ \"mc2_stderr\": 0.015310459215672905\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5631399317406144,\n \"acc_stderr\": 0.014494421584256513,\n\ \ \"acc_norm\": 0.5981228668941979,\n \"acc_norm_stderr\": 0.014327268614578278\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5381398127862975,\n\ \ \"acc_stderr\": 0.004975243508751998,\n \"acc_norm\": 0.7572196773551085,\n\ \ \"acc_norm_stderr\": 0.004278871104930374\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5037037037037037,\n\ \ \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.5037037037037037,\n\ \ \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.04033565667848319,\n\ \ \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.04033565667848319\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.5,\n\ \ \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \ \ \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n\ \ \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5555555555555556,\n\ \ \"acc_stderr\": 0.041553199555931467,\n \"acc_norm\": 0.5555555555555556,\n\ \ \"acc_norm_stderr\": 0.041553199555931467\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n\ \ \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5086705202312138,\n\ \ \"acc_stderr\": 0.03811890988940412,\n \"acc_norm\": 0.5086705202312138,\n\ \ \"acc_norm_stderr\": 0.03811890988940412\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006717,\n\ \ \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006717\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n\ \ \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.03227834510146268,\n\ \ \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.03227834510146268\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n\ \ \"acc_stderr\": 0.04303684033537314,\n \"acc_norm\": 0.2982456140350877,\n\ \ \"acc_norm_stderr\": 0.04303684033537314\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n\ \ \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3201058201058201,\n \"acc_stderr\": 0.024026846392873506,\n \"\ acc_norm\": 0.3201058201058201,\n \"acc_norm_stderr\": 0.024026846392873506\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n\ \ \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n\ \ \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6612903225806451,\n\ \ \"acc_stderr\": 0.026923446059302837,\n \"acc_norm\": 0.6612903225806451,\n\ \ \"acc_norm_stderr\": 0.026923446059302837\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.43349753694581283,\n \"acc_stderr\": 0.034867317274198714,\n\ \ \"acc_norm\": 0.43349753694581283,\n \"acc_norm_stderr\": 0.034867317274198714\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\"\ : 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.036639749943912434,\n\ \ \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.036639749943912434\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6868686868686869,\n \"acc_stderr\": 0.033042050878136525,\n \"\ acc_norm\": 0.6868686868686869,\n \"acc_norm_stderr\": 0.033042050878136525\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7772020725388601,\n \"acc_stderr\": 0.03003114797764154,\n\ \ \"acc_norm\": 0.7772020725388601,\n \"acc_norm_stderr\": 0.03003114797764154\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4846153846153846,\n \"acc_stderr\": 0.025339003010106515,\n\ \ \"acc_norm\": 0.4846153846153846,\n \"acc_norm_stderr\": 0.025339003010106515\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3037037037037037,\n \"acc_stderr\": 0.02803792996911499,\n \ \ \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.02803792996911499\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6008403361344538,\n \"acc_stderr\": 0.03181110032413926,\n \ \ \"acc_norm\": 0.6008403361344538,\n \"acc_norm_stderr\": 0.03181110032413926\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"\ acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7009174311926606,\n \"acc_stderr\": 0.019630417285415175,\n \"\ acc_norm\": 0.7009174311926606,\n \"acc_norm_stderr\": 0.019630417285415175\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4027777777777778,\n \"acc_stderr\": 0.03344887382997867,\n \"\ acc_norm\": 0.4027777777777778,\n \"acc_norm_stderr\": 0.03344887382997867\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7401960784313726,\n \"acc_stderr\": 0.03077855467869326,\n \"\ acc_norm\": 0.7401960784313726,\n \"acc_norm_stderr\": 0.03077855467869326\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7426160337552743,\n \"acc_stderr\": 0.028458820991460302,\n \ \ \"acc_norm\": 0.7426160337552743,\n \"acc_norm_stderr\": 0.028458820991460302\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n\ \ \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n\ \ \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6335877862595419,\n \"acc_stderr\": 0.042258754519696365,\n\ \ \"acc_norm\": 0.6335877862595419,\n \"acc_norm_stderr\": 0.042258754519696365\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.71900826446281,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\"\ : 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n\ \ \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n\ \ \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6134969325153374,\n \"acc_stderr\": 0.03825825548848608,\n\ \ \"acc_norm\": 0.6134969325153374,\n \"acc_norm_stderr\": 0.03825825548848608\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n\ \ \"acc_stderr\": 0.04364226155841044,\n \"acc_norm\": 0.30357142857142855,\n\ \ \"acc_norm_stderr\": 0.04364226155841044\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7087378640776699,\n \"acc_stderr\": 0.044986763205729245,\n\ \ \"acc_norm\": 0.7087378640776699,\n \"acc_norm_stderr\": 0.044986763205729245\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7564102564102564,\n\ \ 
\"acc_stderr\": 0.02812096650391442,\n \"acc_norm\": 0.7564102564102564,\n\ \ \"acc_norm_stderr\": 0.02812096650391442\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7254150702426565,\n\ \ \"acc_stderr\": 0.015959829933084025,\n \"acc_norm\": 0.7254150702426565,\n\ \ \"acc_norm_stderr\": 0.015959829933084025\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6098265895953757,\n \"acc_stderr\": 0.026261677607806636,\n\ \ \"acc_norm\": 0.6098265895953757,\n \"acc_norm_stderr\": 0.026261677607806636\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37094972067039106,\n\ \ \"acc_stderr\": 0.016155910721341774,\n \"acc_norm\": 0.37094972067039106,\n\ \ \"acc_norm_stderr\": 0.016155910721341774\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5784313725490197,\n \"acc_stderr\": 0.028275490156791455,\n\ \ \"acc_norm\": 0.5784313725490197,\n \"acc_norm_stderr\": 0.028275490156791455\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6237942122186495,\n\ \ \"acc_stderr\": 0.02751392568354943,\n \"acc_norm\": 0.6237942122186495,\n\ \ \"acc_norm_stderr\": 0.02751392568354943\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.02751374728437942,\n\ \ \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.02751374728437942\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4432624113475177,\n \"acc_stderr\": 0.029634838473766,\n \ \ \"acc_norm\": 0.4432624113475177,\n \"acc_norm_stderr\": 0.029634838473766\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3970013037809648,\n\ \ \"acc_stderr\": 0.012496346982909553,\n \"acc_norm\": 0.3970013037809648,\n\ \ \"acc_norm_stderr\": 0.012496346982909553\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5073529411764706,\n \"acc_stderr\": 0.030369552523902173,\n\ \ \"acc_norm\": 0.5073529411764706,\n \"acc_norm_stderr\": 0.030369552523902173\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5375816993464052,\n \"acc_stderr\": 0.02017061497496976,\n \ \ \"acc_norm\": 0.5375816993464052,\n \"acc_norm_stderr\": 0.02017061497496976\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n\ \ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \ \ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6122448979591837,\n \"acc_stderr\": 0.031192230726795656,\n\ \ \"acc_norm\": 0.6122448979591837,\n \"acc_norm_stderr\": 0.031192230726795656\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7263681592039801,\n\ \ \"acc_stderr\": 0.031524391865554016,\n \"acc_norm\": 0.7263681592039801,\n\ \ \"acc_norm_stderr\": 0.031524391865554016\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \ \ \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n\ \ \"acc_stderr\": 0.038879718495972646,\n \"acc_norm\": 0.4759036144578313,\n\ \ \"acc_norm_stderr\": 0.038879718495972646\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7602339181286549,\n \"acc_stderr\": 0.03274485211946956,\n\ \ 
\"acc_norm\": 0.7602339181286549,\n \"acc_norm_stderr\": 0.03274485211946956\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3182374541003672,\n\ \ \"mc1_stderr\": 0.016305988648920612,\n \"mc2\": 0.45724154700953135,\n\ \ \"mc2_stderr\": 0.015310459215672905\n }\n}\n```" repo_url: https://huggingface.co/gywy/llama2-13b-chinese-v1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|arc:challenge|25_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hellaswag|10_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T15:10:00.921624.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T15:10:00.921624.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T15:10:00.921624.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-26T15:10:00.921624.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T15:10:00.921624.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-management|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T15:10:00.921624.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_26T15_10_00.921624 path: - '**/details_harness|truthfulqa:mc|0_2023-07-26T15:10:00.921624.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-26T15:10:00.921624.parquet' - config_name: results data_files: - split: 2023_07_26T15_10_00.921624 path: - results_2023-07-26T15:10:00.921624.parquet - split: latest path: - results_2023-07-26T15:10:00.921624.parquet --- # Dataset Card for Evaluation run of gywy/llama2-13b-chinese-v1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/gywy/llama2-13b-chinese-v1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [gywy/llama2-13b-chinese-v1](https://huggingface.co/gywy/llama2-13b-chinese-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_gywy__llama2-13b-chinese-v1", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-26T15:10:00.921624](https://huggingface.co/datasets/open-llm-leaderboard/details_gywy__llama2-13b-chinese-v1/blob/main/results_2023-07-26T15%3A10%3A00.921624.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5420814148370803, "acc_stderr": 0.03472894201222865, "acc_norm": 0.5463875639849175, "acc_norm_stderr": 0.03471430598894899, "mc1": 0.3182374541003672, "mc1_stderr": 0.016305988648920612, "mc2": 0.45724154700953135, "mc2_stderr": 0.015310459215672905 }, "harness|arc:challenge|25": { "acc": 0.5631399317406144, "acc_stderr": 0.014494421584256513, "acc_norm": 0.5981228668941979, "acc_norm_stderr": 0.014327268614578278 }, "harness|hellaswag|10": { "acc": 0.5381398127862975, "acc_stderr": 0.004975243508751998, "acc_norm": 0.7572196773551085, "acc_norm_stderr": 0.004278871104930374 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5037037037037037, "acc_stderr": 0.04319223625811331, "acc_norm": 0.5037037037037037, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.04033565667848319, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.04033565667848319 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.030242233800854494, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.030242233800854494 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5555555555555556, "acc_stderr": 0.041553199555931467, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.041553199555931467 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5086705202312138, "acc_stderr": 0.03811890988940412, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.03811890988940412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006717, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006717 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146268, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 
0.04303684033537314, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537314 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3201058201058201, "acc_stderr": 0.024026846392873506, "acc_norm": 0.3201058201058201, "acc_norm_stderr": 0.024026846392873506 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6612903225806451, "acc_stderr": 0.026923446059302837, "acc_norm": 0.6612903225806451, "acc_norm_stderr": 0.026923446059302837 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43349753694581283, "acc_stderr": 0.034867317274198714, "acc_norm": 0.43349753694581283, "acc_norm_stderr": 0.034867317274198714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6727272727272727, "acc_stderr": 0.036639749943912434, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.036639749943912434 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6868686868686869, "acc_stderr": 0.033042050878136525, "acc_norm": 0.6868686868686869, "acc_norm_stderr": 0.033042050878136525 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7772020725388601, "acc_stderr": 0.03003114797764154, "acc_norm": 0.7772020725388601, "acc_norm_stderr": 0.03003114797764154 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4846153846153846, "acc_stderr": 0.025339003010106515, "acc_norm": 0.4846153846153846, "acc_norm_stderr": 0.025339003010106515 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.02803792996911499, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.02803792996911499 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6008403361344538, "acc_stderr": 0.03181110032413926, "acc_norm": 0.6008403361344538, "acc_norm_stderr": 0.03181110032413926 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7009174311926606, "acc_stderr": 0.019630417285415175, "acc_norm": 0.7009174311926606, "acc_norm_stderr": 0.019630417285415175 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4027777777777778, "acc_stderr": 0.03344887382997867, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.03344887382997867 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7401960784313726, "acc_stderr": 0.03077855467869326, "acc_norm": 0.7401960784313726, "acc_norm_stderr": 0.03077855467869326 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7426160337552743, "acc_stderr": 0.028458820991460302, "acc_norm": 0.7426160337552743, "acc_norm_stderr": 0.028458820991460302 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 
0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6335877862595419, "acc_stderr": 0.042258754519696365, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.042258754519696365 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514512, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514512 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6134969325153374, "acc_stderr": 0.03825825548848608, "acc_norm": 0.6134969325153374, "acc_norm_stderr": 0.03825825548848608 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|hendrycksTest-management|5": { "acc": 0.7087378640776699, "acc_stderr": 0.044986763205729245, "acc_norm": 0.7087378640776699, "acc_norm_stderr": 0.044986763205729245 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7564102564102564, "acc_stderr": 0.02812096650391442, "acc_norm": 0.7564102564102564, "acc_norm_stderr": 0.02812096650391442 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7254150702426565, "acc_stderr": 0.015959829933084025, "acc_norm": 0.7254150702426565, "acc_norm_stderr": 0.015959829933084025 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6098265895953757, "acc_stderr": 0.026261677607806636, "acc_norm": 0.6098265895953757, "acc_norm_stderr": 0.026261677607806636 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.37094972067039106, "acc_stderr": 0.016155910721341774, "acc_norm": 0.37094972067039106, "acc_norm_stderr": 0.016155910721341774 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5784313725490197, "acc_stderr": 0.028275490156791455, "acc_norm": 0.5784313725490197, "acc_norm_stderr": 0.028275490156791455 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6237942122186495, "acc_stderr": 0.02751392568354943, "acc_norm": 0.6237942122186495, "acc_norm_stderr": 0.02751392568354943 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5740740740740741, "acc_stderr": 0.02751374728437942, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.02751374728437942 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4432624113475177, "acc_stderr": 0.029634838473766, "acc_norm": 0.4432624113475177, "acc_norm_stderr": 0.029634838473766 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3970013037809648, "acc_stderr": 0.012496346982909553, "acc_norm": 0.3970013037809648, "acc_norm_stderr": 0.012496346982909553 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5073529411764706, "acc_stderr": 0.030369552523902173, "acc_norm": 0.5073529411764706, "acc_norm_stderr": 0.030369552523902173 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5375816993464052, "acc_stderr": 0.02017061497496976, "acc_norm": 0.5375816993464052, "acc_norm_stderr": 0.02017061497496976 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.6122448979591837, "acc_stderr": 0.031192230726795656, "acc_norm": 0.6122448979591837, "acc_norm_stderr": 0.031192230726795656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7263681592039801, "acc_stderr": 0.031524391865554016, "acc_norm": 0.7263681592039801, "acc_norm_stderr": 0.031524391865554016 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.038879718495972646, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.038879718495972646 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.03274485211946956, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.03274485211946956 }, "harness|truthfulqa:mc|0": { "mc1": 0.3182374541003672, "mc1_stderr": 0.016305988648920612, "mc2": 0.45724154700953135, "mc2_stderr": 0.015310459215672905 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
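Beyond the aggregated view above, each evaluated task listed in the YAML header has its own configuration, and its `latest` split always tracks the most recent run. The snippet below is a minimal sketch (the repository and configuration names are taken from this card; the rest is standard `datasets` usage) showing how one MMLU sub-task and the aggregated `results` configuration could be loaded:

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_gywy__llama2-13b-chinese-v1"

# Per-task details: one configuration per evaluated task; the "latest" split
# points to the most recent run (here 2023-07-26T15:10:00.921624).
abstract_algebra = load_dataset(REPO, "harness_hendrycksTest_abstract_algebra_5", split="latest")

# Aggregated metrics for the whole run are stored in the "results" configuration.
results = load_dataset(REPO, "results", split="latest")

print(abstract_algebra)
print(results)
```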
open-llm-leaderboard/details_wannaphong__openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard
open-llm-leaderboard
2023-08-27T12:39:12Z
201
0
[ "region:us" ]
null
2023-08-18T12:00:45Z
--- pretty_name: Evaluation run of wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard](https://huggingface.co/wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_wannaphong__openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-19T19:43:56.163640](https://huggingface.co/datasets/open-llm-leaderboard/details_wannaphong__openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard/blob/main/results_2023-07-19T19%3A43%3A56.163640.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3383082942783733,\n\ \ \"acc_stderr\": 0.034038904937501814,\n \"acc_norm\": 0.3424207667888371,\n\ \ \"acc_norm_stderr\": 0.03402640930744709,\n \"mc1\": 0.2741738066095471,\n\ \ \"mc1_stderr\": 0.015616518497219373,\n \"mc2\": 0.4327576136566873,\n\ \ \"mc2_stderr\": 0.015062768361653264\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.4684300341296928,\n \"acc_stderr\": 0.014582236460866984,\n\ \ \"acc_norm\": 0.5127986348122867,\n \"acc_norm_stderr\": 0.014606603181012538\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5763792073292173,\n\ \ \"acc_stderr\": 0.004931219148182242,\n \"acc_norm\": 0.7746464847639912,\n\ \ \"acc_norm_stderr\": 0.0041696102548079705\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.34814814814814815,\n\ \ \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.34814814814814815,\n\ \ \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.037827289808654685,\n\ \ \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.037827289808654685\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.42,\n\ \ \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.37735849056603776,\n \"acc_stderr\": 0.029832808114796005,\n\ \ \"acc_norm\": 0.37735849056603776,\n \"acc_norm_stderr\": 0.029832808114796005\n\ \ 
},\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2916666666666667,\n\ \ \"acc_stderr\": 0.03800968060554858,\n \"acc_norm\": 0.2916666666666667,\n\ \ \"acc_norm_stderr\": 0.03800968060554858\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\ : 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3063583815028902,\n\ \ \"acc_stderr\": 0.03514942551267437,\n \"acc_norm\": 0.3063583815028902,\n\ \ \"acc_norm_stderr\": 0.03514942551267437\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171452,\n\ \ \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171452\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n\ \ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.3617021276595745,\n \"acc_stderr\": 0.03141082197596241,\n\ \ \"acc_norm\": 0.3617021276595745,\n \"acc_norm_stderr\": 0.03141082197596241\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2807017543859649,\n\ \ \"acc_stderr\": 0.042270544512322004,\n \"acc_norm\": 0.2807017543859649,\n\ \ \"acc_norm_stderr\": 0.042270544512322004\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n\ \ \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.24338624338624337,\n \"acc_stderr\": 0.022101128787415426,\n \"\ acc_norm\": 0.24338624338624337,\n \"acc_norm_stderr\": 0.022101128787415426\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.037184890068181146,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.037184890068181146\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.3387096774193548,\n \"acc_stderr\": 0.026923446059302834,\n \"\ acc_norm\": 0.3387096774193548,\n \"acc_norm_stderr\": 0.026923446059302834\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.28078817733990147,\n \"acc_stderr\": 0.0316185633535861,\n \"\ acc_norm\": 0.28078817733990147,\n \"acc_norm_stderr\": 0.0316185633535861\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\ : 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.41818181818181815,\n \"acc_stderr\": 0.03851716319398393,\n\ \ \"acc_norm\": 0.41818181818181815,\n \"acc_norm_stderr\": 0.03851716319398393\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n 
\"acc\"\ : 0.41919191919191917,\n \"acc_stderr\": 0.035155207286704175,\n \"\ acc_norm\": 0.41919191919191917,\n \"acc_norm_stderr\": 0.035155207286704175\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.42487046632124353,\n \"acc_stderr\": 0.0356747133521254,\n\ \ \"acc_norm\": 0.42487046632124353,\n \"acc_norm_stderr\": 0.0356747133521254\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.3128205128205128,\n \"acc_stderr\": 0.02350757902064535,\n \ \ \"acc_norm\": 0.3128205128205128,\n \"acc_norm_stderr\": 0.02350757902064535\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.24814814814814815,\n \"acc_stderr\": 0.0263357394040558,\n \ \ \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.0263357394040558\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.02959732973097809,\n\ \ \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.02959732973097809\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.26490066225165565,\n \"acc_stderr\": 0.036030385453603854,\n \"\ acc_norm\": 0.26490066225165565,\n \"acc_norm_stderr\": 0.036030385453603854\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.3577981651376147,\n \"acc_stderr\": 0.020552060784827814,\n \"\ acc_norm\": 0.3577981651376147,\n \"acc_norm_stderr\": 0.020552060784827814\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.2824074074074074,\n \"acc_stderr\": 0.03070137211151094,\n \"\ acc_norm\": 0.2824074074074074,\n \"acc_norm_stderr\": 0.03070137211151094\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.37254901960784315,\n \"acc_stderr\": 0.03393388584958404,\n \"\ acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.03393388584958404\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.3755274261603376,\n \"acc_stderr\": 0.03152256243091156,\n \ \ \"acc_norm\": 0.3755274261603376,\n \"acc_norm_stderr\": 0.03152256243091156\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.36771300448430494,\n\ \ \"acc_stderr\": 0.03236198350928276,\n \"acc_norm\": 0.36771300448430494,\n\ \ \"acc_norm_stderr\": 0.03236198350928276\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.32061068702290074,\n \"acc_stderr\": 0.04093329229834278,\n\ \ \"acc_norm\": 0.32061068702290074,\n \"acc_norm_stderr\": 0.04093329229834278\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5702479338842975,\n \"acc_stderr\": 0.04519082021319773,\n \"\ acc_norm\": 0.5702479338842975,\n \"acc_norm_stderr\": 0.04519082021319773\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.35185185185185186,\n\ \ \"acc_stderr\": 0.04616631111801713,\n \"acc_norm\": 0.35185185185185186,\n\ \ \"acc_norm_stderr\": 0.04616631111801713\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.34355828220858897,\n \"acc_stderr\": 0.03731133519673893,\n\ \ \"acc_norm\": 0.34355828220858897,\n \"acc_norm_stderr\": 0.03731133519673893\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.21428571428571427,\n\ \ \"acc_stderr\": 0.038946411200447915,\n \"acc_norm\": 0.21428571428571427,\n\ \ \"acc_norm_stderr\": 0.038946411200447915\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.32038834951456313,\n \"acc_stderr\": 0.04620284082280039,\n\ \ 
\"acc_norm\": 0.32038834951456313,\n \"acc_norm_stderr\": 0.04620284082280039\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.4188034188034188,\n\ \ \"acc_stderr\": 0.03232128912157791,\n \"acc_norm\": 0.4188034188034188,\n\ \ \"acc_norm_stderr\": 0.03232128912157791\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.4125159642401022,\n\ \ \"acc_stderr\": 0.01760414910867193,\n \"acc_norm\": 0.4125159642401022,\n\ \ \"acc_norm_stderr\": 0.01760414910867193\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.3439306358381503,\n \"acc_stderr\": 0.025574123786546648,\n\ \ \"acc_norm\": 0.3439306358381503,\n \"acc_norm_stderr\": 0.025574123786546648\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.02699254433929725,\n\ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.02699254433929725\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.33762057877813506,\n\ \ \"acc_stderr\": 0.026858825879488547,\n \"acc_norm\": 0.33762057877813506,\n\ \ \"acc_norm_stderr\": 0.026858825879488547\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.35802469135802467,\n \"acc_stderr\": 0.026675611926037093,\n\ \ \"acc_norm\": 0.35802469135802467,\n \"acc_norm_stderr\": 0.026675611926037093\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2695035460992908,\n \"acc_stderr\": 0.026469036818590627,\n \ \ \"acc_norm\": 0.2695035460992908,\n \"acc_norm_stderr\": 0.026469036818590627\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3089960886571056,\n\ \ \"acc_stderr\": 0.011801729777239249,\n \"acc_norm\": 0.3089960886571056,\n\ \ \"acc_norm_stderr\": 0.011801729777239249\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.33088235294117646,\n \"acc_stderr\": 0.02858270975389844,\n\ \ \"acc_norm\": 0.33088235294117646,\n \"acc_norm_stderr\": 0.02858270975389844\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.3284313725490196,\n \"acc_stderr\": 0.01899970738316267,\n \ \ \"acc_norm\": 0.3284313725490196,\n \"acc_norm_stderr\": 0.01899970738316267\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4,\n\ \ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.4,\n \ \ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.27755102040816326,\n \"acc_stderr\": 0.028666857790274648,\n\ \ \"acc_norm\": 0.27755102040816326,\n \"acc_norm_stderr\": 0.028666857790274648\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.4228855721393035,\n\ \ \"acc_stderr\": 0.034932317774212816,\n \"acc_norm\": 0.4228855721393035,\n\ \ \"acc_norm_stderr\": 0.034932317774212816\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3614457831325301,\n\ \ \"acc_stderr\": 0.037400593820293204,\n \"acc_norm\": 0.3614457831325301,\n\ \ 
\"acc_norm_stderr\": 0.037400593820293204\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.38011695906432746,\n \"acc_stderr\": 0.037229657413855394,\n\ \ \"acc_norm\": 0.38011695906432746,\n \"acc_norm_stderr\": 0.037229657413855394\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2741738066095471,\n\ \ \"mc1_stderr\": 0.015616518497219373,\n \"mc2\": 0.4327576136566873,\n\ \ \"mc2_stderr\": 0.015062768361653264\n }\n}\n```" repo_url: https://huggingface.co/wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|arc:challenge|25_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hellaswag|10_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:43:56.163640.parquet' - 
'**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:43:56.163640.parquet' - 
'**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:43:56.163640.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:43:56.163640.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:43:56.163640.parquet' - config_name: 
harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - 
split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:43:56.163640.parquet' - split: latest 
path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:43:56.163640.parquet' - config_name: 
harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:43:56.163640.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T19_43_56.163640 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T19:43:56.163640.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T19:43:56.163640.parquet' - config_name: results data_files: - split: 2023_07_19T19_43_56.163640 path: - results_2023-07-19T19:43:56.163640.parquet - split: latest path: - results_2023-07-19T19:43:56.163640.parquet
---

# Dataset Card for Evaluation run of wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard](https://huggingface.co/wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
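For example, here is a minimal sketch of listing those configurations and loading a single sub-task at its "latest" split with the `datasets` library. The configuration and split names are taken from this card's configuration list above; swap in whichever task you are interested in.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_wannaphong__openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard"

# List the available configurations (one per evaluated task, plus the aggregated "results").
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# Load one sub-task, either pinned to the timestamped run split or via the "latest" alias.
abstract_algebra = load_dataset(repo, "harness_hendrycksTest_abstract_algebra_5", split="latest")
print(abstract_algebra)
```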
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_wannaphong__openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-19T19:43:56.163640](https://huggingface.co/datasets/open-llm-leaderboard/details_wannaphong__openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard/blob/main/results_2023-07-19T19%3A43%3A56.163640.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.3383082942783733, "acc_stderr": 0.034038904937501814, "acc_norm": 0.3424207667888371, "acc_norm_stderr": 0.03402640930744709, "mc1": 0.2741738066095471, "mc1_stderr": 0.015616518497219373, "mc2": 0.4327576136566873, "mc2_stderr": 0.015062768361653264 }, "harness|arc:challenge|25": { "acc": 0.4684300341296928, "acc_stderr": 0.014582236460866984, "acc_norm": 0.5127986348122867, "acc_norm_stderr": 0.014606603181012538 }, "harness|hellaswag|10": { "acc": 0.5763792073292173, "acc_stderr": 0.004931219148182242, "acc_norm": 0.7746464847639912, "acc_norm_stderr": 0.0041696102548079705 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3157894736842105, "acc_stderr": 0.037827289808654685, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.037827289808654685 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.37735849056603776, "acc_stderr": 0.029832808114796005, "acc_norm": 0.37735849056603776, "acc_norm_stderr": 0.029832808114796005 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2916666666666667, "acc_stderr": 0.03800968060554858, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03800968060554858 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.03514942551267437, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267437 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171452, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171452 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.41, "acc_stderr": 
0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3617021276595745, "acc_stderr": 0.03141082197596241, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.03141082197596241 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.042270544512322004, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512322004 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.022101128787415426, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.022101128787415426 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2222222222222222, "acc_stderr": 0.037184890068181146, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.037184890068181146 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3387096774193548, "acc_stderr": 0.026923446059302834, "acc_norm": 0.3387096774193548, "acc_norm_stderr": 0.026923446059302834 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.28078817733990147, "acc_stderr": 0.0316185633535861, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.0316185633535861 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.41818181818181815, "acc_stderr": 0.03851716319398393, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.03851716319398393 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.41919191919191917, "acc_stderr": 0.035155207286704175, "acc_norm": 0.41919191919191917, "acc_norm_stderr": 0.035155207286704175 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.42487046632124353, "acc_stderr": 0.0356747133521254, "acc_norm": 0.42487046632124353, "acc_norm_stderr": 0.0356747133521254 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3128205128205128, "acc_stderr": 0.02350757902064535, "acc_norm": 0.3128205128205128, "acc_norm_stderr": 0.02350757902064535 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.02959732973097809, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.02959732973097809 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.036030385453603854, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.036030385453603854 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3577981651376147, "acc_stderr": 0.020552060784827814, "acc_norm": 0.3577981651376147, "acc_norm_stderr": 0.020552060784827814 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2824074074074074, "acc_stderr": 0.03070137211151094, "acc_norm": 0.2824074074074074, "acc_norm_stderr": 0.03070137211151094 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.37254901960784315, "acc_stderr": 0.03393388584958404, 
"acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.03393388584958404 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.3755274261603376, "acc_stderr": 0.03152256243091156, "acc_norm": 0.3755274261603376, "acc_norm_stderr": 0.03152256243091156 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.36771300448430494, "acc_stderr": 0.03236198350928276, "acc_norm": 0.36771300448430494, "acc_norm_stderr": 0.03236198350928276 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.32061068702290074, "acc_stderr": 0.04093329229834278, "acc_norm": 0.32061068702290074, "acc_norm_stderr": 0.04093329229834278 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5702479338842975, "acc_stderr": 0.04519082021319773, "acc_norm": 0.5702479338842975, "acc_norm_stderr": 0.04519082021319773 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.35185185185185186, "acc_stderr": 0.04616631111801713, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.04616631111801713 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.34355828220858897, "acc_stderr": 0.03731133519673893, "acc_norm": 0.34355828220858897, "acc_norm_stderr": 0.03731133519673893 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.038946411200447915, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.038946411200447915 }, "harness|hendrycksTest-management|5": { "acc": 0.32038834951456313, "acc_stderr": 0.04620284082280039, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.04620284082280039 }, "harness|hendrycksTest-marketing|5": { "acc": 0.4188034188034188, "acc_stderr": 0.03232128912157791, "acc_norm": 0.4188034188034188, "acc_norm_stderr": 0.03232128912157791 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.4125159642401022, "acc_stderr": 0.01760414910867193, "acc_norm": 0.4125159642401022, "acc_norm_stderr": 0.01760414910867193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.3439306358381503, "acc_stderr": 0.025574123786546648, "acc_norm": 0.3439306358381503, "acc_norm_stderr": 0.025574123786546648 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02699254433929725, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02699254433929725 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.33762057877813506, "acc_stderr": 0.026858825879488547, "acc_norm": 0.33762057877813506, "acc_norm_stderr": 0.026858825879488547 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.35802469135802467, "acc_stderr": 0.026675611926037093, "acc_norm": 0.35802469135802467, "acc_norm_stderr": 0.026675611926037093 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590627, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590627 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3089960886571056, "acc_stderr": 0.011801729777239249, "acc_norm": 0.3089960886571056, "acc_norm_stderr": 0.011801729777239249 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.33088235294117646, "acc_stderr": 0.02858270975389844, "acc_norm": 0.33088235294117646, "acc_norm_stderr": 0.02858270975389844 }, 
"harness|hendrycksTest-professional_psychology|5": { "acc": 0.3284313725490196, "acc_stderr": 0.01899970738316267, "acc_norm": 0.3284313725490196, "acc_norm_stderr": 0.01899970738316267 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4, "acc_stderr": 0.0469237132203465, "acc_norm": 0.4, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.27755102040816326, "acc_stderr": 0.028666857790274648, "acc_norm": 0.27755102040816326, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.4228855721393035, "acc_stderr": 0.034932317774212816, "acc_norm": 0.4228855721393035, "acc_norm_stderr": 0.034932317774212816 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.037400593820293204, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.037400593820293204 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.38011695906432746, "acc_stderr": 0.037229657413855394, "acc_norm": 0.38011695906432746, "acc_norm_stderr": 0.037229657413855394 }, "harness|truthfulqa:mc|0": { "mc1": 0.2741738066095471, "mc1_stderr": 0.015616518497219373, "mc2": 0.4327576136566873, "mc2_stderr": 0.015062768361653264 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-40k-sharegpt
open-llm-leaderboard
2023-08-27T12:39:26Z
201
0
[ "region:us" ]
null
2023-08-18T12:02:04Z
--- pretty_name: Evaluation run of yihan6324/llama2-7b-instructmining-40k-sharegpt dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yihan6324/llama2-7b-instructmining-40k-sharegpt](https://huggingface.co/yihan6324/llama2-7b-instructmining-40k-sharegpt)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-40k-sharegpt\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-09T21:00:12.284244](https://huggingface.co/datasets/open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-40k-sharegpt/blob/main/results_2023-08-09T21%3A00%3A12.284244.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.506231001015833,\n\ \ \"acc_stderr\": 0.03505018845563652,\n \"acc_norm\": 0.5099522031118208,\n\ \ \"acc_norm_stderr\": 0.035035258453899244,\n \"mc1\": 0.36474908200734396,\n\ \ \"mc1_stderr\": 0.01685096106172012,\n \"mc2\": 0.5317717765572597,\n\ \ \"mc2_stderr\": 0.015775374488304787\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5170648464163823,\n \"acc_stderr\": 0.014602878388536595,\n\ \ \"acc_norm\": 0.5511945392491467,\n \"acc_norm_stderr\": 0.014534599585097664\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6041625174268074,\n\ \ \"acc_stderr\": 0.004880303863138504,\n \"acc_norm\": 0.7895837482573193,\n\ \ \"acc_norm_stderr\": 0.0040677125640782895\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n\ \ \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n\ \ \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.4605263157894737,\n \"acc_stderr\": 0.04056242252249034,\n\ \ \"acc_norm\": 0.4605263157894737,\n \"acc_norm_stderr\": 0.04056242252249034\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n\ \ \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \ \ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5245283018867924,\n \"acc_stderr\": 0.030735822206205608,\n\ \ \"acc_norm\": 0.5245283018867924,\n \"acc_norm_stderr\": 0.030735822206205608\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4930555555555556,\n\ \ \"acc_stderr\": 
0.04180806750294938,\n \"acc_norm\": 0.4930555555555556,\n\ \ \"acc_norm_stderr\": 0.04180806750294938\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\"\ : 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4682080924855491,\n\ \ \"acc_stderr\": 0.03804749744364763,\n \"acc_norm\": 0.4682080924855491,\n\ \ \"acc_norm_stderr\": 0.03804749744364763\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.03793281185307809,\n\ \ \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.03793281185307809\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n\ \ \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4723404255319149,\n \"acc_stderr\": 0.03263597118409769,\n\ \ \"acc_norm\": 0.4723404255319149,\n \"acc_norm_stderr\": 0.03263597118409769\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n\ \ \"acc_stderr\": 0.040969851398436716,\n \"acc_norm\": 0.2543859649122807,\n\ \ \"acc_norm_stderr\": 0.040969851398436716\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.496551724137931,\n \"acc_stderr\": 0.04166567577101579,\n\ \ \"acc_norm\": 0.496551724137931,\n \"acc_norm_stderr\": 0.04166567577101579\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3148148148148148,\n \"acc_stderr\": 0.02391998416404773,\n \"\ acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02391998416404773\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3492063492063492,\n\ \ \"acc_stderr\": 0.04263906892795133,\n \"acc_norm\": 0.3492063492063492,\n\ \ \"acc_norm_stderr\": 0.04263906892795133\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.532258064516129,\n\ \ \"acc_stderr\": 0.028384747788813332,\n \"acc_norm\": 0.532258064516129,\n\ \ \"acc_norm_stderr\": 0.028384747788813332\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.35467980295566504,\n \"acc_stderr\": 0.0336612448905145,\n\ \ \"acc_norm\": 0.35467980295566504,\n \"acc_norm_stderr\": 0.0336612448905145\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\"\ : 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n\ \ \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.5959595959595959,\n \"acc_stderr\": 0.034961309720561294,\n \"\ acc_norm\": 0.5959595959595959,\n 
\"acc_norm_stderr\": 0.034961309720561294\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7512953367875648,\n \"acc_stderr\": 0.031195840877700286,\n\ \ \"acc_norm\": 0.7512953367875648,\n \"acc_norm_stderr\": 0.031195840877700286\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4564102564102564,\n \"acc_stderr\": 0.025254485424799605,\n\ \ \"acc_norm\": 0.4564102564102564,\n \"acc_norm_stderr\": 0.025254485424799605\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085622,\n \ \ \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085622\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.42436974789915966,\n \"acc_stderr\": 0.03210479051015776,\n\ \ \"acc_norm\": 0.42436974789915966,\n \"acc_norm_stderr\": 0.03210479051015776\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"\ acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.6990825688073394,\n \"acc_stderr\": 0.019664751366802114,\n \"\ acc_norm\": 0.6990825688073394,\n \"acc_norm_stderr\": 0.019664751366802114\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.3425925925925926,\n \"acc_stderr\": 0.032365852526021574,\n \"\ acc_norm\": 0.3425925925925926,\n \"acc_norm_stderr\": 0.032365852526021574\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.6764705882352942,\n \"acc_stderr\": 0.032834720561085606,\n \"\ acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.032834720561085606\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7172995780590717,\n \"acc_stderr\": 0.029312814153955934,\n \ \ \"acc_norm\": 0.7172995780590717,\n \"acc_norm_stderr\": 0.029312814153955934\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5964125560538116,\n\ \ \"acc_stderr\": 0.03292802819330314,\n \"acc_norm\": 0.5964125560538116,\n\ \ \"acc_norm_stderr\": 0.03292802819330314\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6183206106870229,\n \"acc_stderr\": 0.0426073515764456,\n\ \ \"acc_norm\": 0.6183206106870229,\n \"acc_norm_stderr\": 0.0426073515764456\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6115702479338843,\n \"acc_stderr\": 0.044492703500683836,\n \"\ acc_norm\": 0.6115702479338843,\n \"acc_norm_stderr\": 0.044492703500683836\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6111111111111112,\n\ \ \"acc_stderr\": 0.0471282125742677,\n \"acc_norm\": 0.6111111111111112,\n\ \ \"acc_norm_stderr\": 0.0471282125742677\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5828220858895705,\n \"acc_stderr\": 0.038741028598180814,\n\ \ \"acc_norm\": 0.5828220858895705,\n \"acc_norm_stderr\": 0.038741028598180814\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n\ \ \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n\ \ \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6601941747572816,\n \"acc_stderr\": 0.04689765937278135,\n\ \ \"acc_norm\": 0.6601941747572816,\n \"acc_norm_stderr\": 0.04689765937278135\n\ \ },\n 
\"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7564102564102564,\n\ \ \"acc_stderr\": 0.028120966503914397,\n \"acc_norm\": 0.7564102564102564,\n\ \ \"acc_norm_stderr\": 0.028120966503914397\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \ \ \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6794380587484036,\n\ \ \"acc_stderr\": 0.01668889331080376,\n \"acc_norm\": 0.6794380587484036,\n\ \ \"acc_norm_stderr\": 0.01668889331080376\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5635838150289018,\n \"acc_stderr\": 0.02670054542494367,\n\ \ \"acc_norm\": 0.5635838150289018,\n \"acc_norm_stderr\": 0.02670054542494367\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.264804469273743,\n\ \ \"acc_stderr\": 0.014756906483260659,\n \"acc_norm\": 0.264804469273743,\n\ \ \"acc_norm_stderr\": 0.014756906483260659\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5196078431372549,\n \"acc_stderr\": 0.028607893699576066,\n\ \ \"acc_norm\": 0.5196078431372549,\n \"acc_norm_stderr\": 0.028607893699576066\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5916398713826366,\n\ \ \"acc_stderr\": 0.027917050748484627,\n \"acc_norm\": 0.5916398713826366,\n\ \ \"acc_norm_stderr\": 0.027917050748484627\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5216049382716049,\n \"acc_stderr\": 0.027794760105008736,\n\ \ \"acc_norm\": 0.5216049382716049,\n \"acc_norm_stderr\": 0.027794760105008736\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.39361702127659576,\n \"acc_stderr\": 0.02914454478159615,\n \ \ \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.02914454478159615\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3820078226857888,\n\ \ \"acc_stderr\": 0.012409564470235565,\n \"acc_norm\": 0.3820078226857888,\n\ \ \"acc_norm_stderr\": 0.012409564470235565\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.03033257809455504,\n\ \ \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.03033257809455504\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.4918300653594771,\n \"acc_stderr\": 0.02022513434305726,\n \ \ \"acc_norm\": 0.4918300653594771,\n \"acc_norm_stderr\": 0.02022513434305726\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5909090909090909,\n\ \ \"acc_stderr\": 0.04709306978661896,\n \"acc_norm\": 0.5909090909090909,\n\ \ \"acc_norm_stderr\": 0.04709306978661896\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5918367346938775,\n \"acc_stderr\": 0.03146465712827424,\n\ \ \"acc_norm\": 0.5918367346938775,\n \"acc_norm_stderr\": 0.03146465712827424\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6915422885572139,\n\ \ \"acc_stderr\": 0.03265819588512698,\n \"acc_norm\": 0.6915422885572139,\n\ \ \"acc_norm_stderr\": 0.03265819588512698\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \ \ \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39156626506024095,\n\ \ \"acc_stderr\": 0.03799857454479636,\n \"acc_norm\": 0.39156626506024095,\n\ \ \"acc_norm_stderr\": 0.03799857454479636\n },\n 
\"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7134502923976608,\n \"acc_stderr\": 0.03467826685703826,\n\ \ \"acc_norm\": 0.7134502923976608,\n \"acc_norm_stderr\": 0.03467826685703826\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36474908200734396,\n\ \ \"mc1_stderr\": 0.01685096106172012,\n \"mc2\": 0.5317717765572597,\n\ \ \"mc2_stderr\": 0.015775374488304787\n }\n}\n```" repo_url: https://huggingface.co/yihan6324/llama2-7b-instructmining-40k-sharegpt leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|arc:challenge|25_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hellaswag|10_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T21:00:12.284244.parquet' - 
'**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T21:00:12.284244.parquet' - 
'**/details_harness|hendrycksTest-college_biology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T21:00:12.284244.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-09T21:00:12.284244.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T21:00:12.284244.parquet' - config_name: 
harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - 
split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T21:00:12.284244.parquet' - split: latest 
path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-management|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-marketing|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T21:00:12.284244.parquet' - config_name: 
harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T21:00:12.284244.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_09T21_00_12.284244 path: - '**/details_harness|truthfulqa:mc|0_2023-08-09T21:00:12.284244.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-09T21:00:12.284244.parquet' - config_name: results data_files: - split: 2023_08_09T21_00_12.284244 path: - results_2023-08-09T21:00:12.284244.parquet - split: latest path: - results_2023-08-09T21:00:12.284244.parquet --- # Dataset Card for Evaluation run of yihan6324/llama2-7b-instructmining-40k-sharegpt ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yihan6324/llama2-7b-instructmining-40k-sharegpt - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [yihan6324/llama2-7b-instructmining-40k-sharegpt](https://huggingface.co/yihan6324/llama2-7b-instructmining-40k-sharegpt) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
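For instance, a minimal sketch for reading those aggregated metrics (this example is not part of the original card template; it assumes the `datasets` library is installed and that the repository is publicly readable):

```python
from datasets import load_dataset

# Load the aggregated "results" configuration of this details dataset.
# The "latest" split always points at the most recent evaluation run;
# the other split names are the run timestamps listed in the configs above.
results = load_dataset(
    "open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-40k-sharegpt",
    "results",
    split="latest",
)

# A single row is expected here, holding the aggregated metrics of the run.
print(results[0])
```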
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-40k-sharegpt", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-09T21:00:12.284244](https://huggingface.co/datasets/open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-40k-sharegpt/blob/main/results_2023-08-09T21%3A00%3A12.284244.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.506231001015833, "acc_stderr": 0.03505018845563652, "acc_norm": 0.5099522031118208, "acc_norm_stderr": 0.035035258453899244, "mc1": 0.36474908200734396, "mc1_stderr": 0.01685096106172012, "mc2": 0.5317717765572597, "mc2_stderr": 0.015775374488304787 }, "harness|arc:challenge|25": { "acc": 0.5170648464163823, "acc_stderr": 0.014602878388536595, "acc_norm": 0.5511945392491467, "acc_norm_stderr": 0.014534599585097664 }, "harness|hellaswag|10": { "acc": 0.6041625174268074, "acc_stderr": 0.004880303863138504, "acc_norm": 0.7895837482573193, "acc_norm_stderr": 0.0040677125640782895 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4605263157894737, "acc_stderr": 0.04056242252249034, "acc_norm": 0.4605263157894737, "acc_norm_stderr": 0.04056242252249034 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5245283018867924, "acc_stderr": 0.030735822206205608, "acc_norm": 0.5245283018867924, "acc_norm_stderr": 0.030735822206205608 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4930555555555556, "acc_stderr": 0.04180806750294938, "acc_norm": 0.4930555555555556, "acc_norm_stderr": 0.04180806750294938 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4682080924855491, "acc_stderr": 0.03804749744364763, "acc_norm": 0.4682080924855491, "acc_norm_stderr": 0.03804749744364763 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307809, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307809 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4723404255319149, "acc_stderr": 0.03263597118409769, "acc_norm": 0.4723404255319149, "acc_norm_stderr": 0.03263597118409769 }, 
"harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.04166567577101579, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.02391998416404773, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02391998416404773 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795133 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.532258064516129, "acc_stderr": 0.028384747788813332, "acc_norm": 0.532258064516129, "acc_norm_stderr": 0.028384747788813332 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.35467980295566504, "acc_stderr": 0.0336612448905145, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.0336612448905145 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5959595959595959, "acc_stderr": 0.034961309720561294, "acc_norm": 0.5959595959595959, "acc_norm_stderr": 0.034961309720561294 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7512953367875648, "acc_stderr": 0.031195840877700286, "acc_norm": 0.7512953367875648, "acc_norm_stderr": 0.031195840877700286 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4564102564102564, "acc_stderr": 0.025254485424799605, "acc_norm": 0.4564102564102564, "acc_norm_stderr": 0.025254485424799605 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085622, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.42436974789915966, "acc_stderr": 0.03210479051015776, "acc_norm": 0.42436974789915966, "acc_norm_stderr": 0.03210479051015776 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6990825688073394, "acc_stderr": 0.019664751366802114, "acc_norm": 0.6990825688073394, "acc_norm_stderr": 0.019664751366802114 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3425925925925926, "acc_stderr": 0.032365852526021574, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.032365852526021574 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6764705882352942, "acc_stderr": 0.032834720561085606, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.032834720561085606 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7172995780590717, "acc_stderr": 0.029312814153955934, "acc_norm": 0.7172995780590717, "acc_norm_stderr": 0.029312814153955934 }, 
"harness|hendrycksTest-human_aging|5": { "acc": 0.5964125560538116, "acc_stderr": 0.03292802819330314, "acc_norm": 0.5964125560538116, "acc_norm_stderr": 0.03292802819330314 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6183206106870229, "acc_stderr": 0.0426073515764456, "acc_norm": 0.6183206106870229, "acc_norm_stderr": 0.0426073515764456 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.044492703500683836, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.044492703500683836 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6111111111111112, "acc_stderr": 0.0471282125742677, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.0471282125742677 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5828220858895705, "acc_stderr": 0.038741028598180814, "acc_norm": 0.5828220858895705, "acc_norm_stderr": 0.038741028598180814 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.6601941747572816, "acc_stderr": 0.04689765937278135, "acc_norm": 0.6601941747572816, "acc_norm_stderr": 0.04689765937278135 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7564102564102564, "acc_stderr": 0.028120966503914397, "acc_norm": 0.7564102564102564, "acc_norm_stderr": 0.028120966503914397 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6794380587484036, "acc_stderr": 0.01668889331080376, "acc_norm": 0.6794380587484036, "acc_norm_stderr": 0.01668889331080376 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5635838150289018, "acc_stderr": 0.02670054542494367, "acc_norm": 0.5635838150289018, "acc_norm_stderr": 0.02670054542494367 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.264804469273743, "acc_stderr": 0.014756906483260659, "acc_norm": 0.264804469273743, "acc_norm_stderr": 0.014756906483260659 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5196078431372549, "acc_stderr": 0.028607893699576066, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.028607893699576066 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5916398713826366, "acc_stderr": 0.027917050748484627, "acc_norm": 0.5916398713826366, "acc_norm_stderr": 0.027917050748484627 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5216049382716049, "acc_stderr": 0.027794760105008736, "acc_norm": 0.5216049382716049, "acc_norm_stderr": 0.027794760105008736 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.02914454478159615, "acc_norm": 0.39361702127659576, "acc_norm_stderr": 0.02914454478159615 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3820078226857888, "acc_stderr": 0.012409564470235565, "acc_norm": 0.3820078226857888, "acc_norm_stderr": 0.012409564470235565 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5257352941176471, "acc_stderr": 0.03033257809455504, "acc_norm": 0.5257352941176471, "acc_norm_stderr": 0.03033257809455504 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4918300653594771, "acc_stderr": 0.02022513434305726, "acc_norm": 0.4918300653594771, "acc_norm_stderr": 0.02022513434305726 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.04709306978661896, "acc_norm": 
0.5909090909090909, "acc_norm_stderr": 0.04709306978661896 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5918367346938775, "acc_stderr": 0.03146465712827424, "acc_norm": 0.5918367346938775, "acc_norm_stderr": 0.03146465712827424 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6915422885572139, "acc_stderr": 0.03265819588512698, "acc_norm": 0.6915422885572139, "acc_norm_stderr": 0.03265819588512698 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479636, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479636 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7134502923976608, "acc_stderr": 0.03467826685703826, "acc_norm": 0.7134502923976608, "acc_norm_stderr": 0.03467826685703826 }, "harness|truthfulqa:mc|0": { "mc1": 0.36474908200734396, "mc1_stderr": 0.01685096106172012, "mc2": 0.5317717765572597, "mc2_stderr": 0.015775374488304787 } }
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
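For illustration, the loading pattern shown above can be extended to other configurations of this details dataset. The sketch below is not part of the original card: the per-task config name `harness_arc_challenge_25` and the aggregated `results` configuration are assumed to follow the standard Open LLM Leaderboard details-dataset layout, and the `latest` split is assumed to point at the most recent run, as the summary above describes.

```python
# Illustrative sketch only: config names and the "latest" split are assumptions
# based on the usual Open LLM Leaderboard details-dataset layout described above.
from datasets import load_dataset

repo = "open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-40k-sharegpt"

# Per-task details, e.g. the 25-shot ARC challenge predictions (assumed config name)
arc_details = load_dataset(repo, "harness_arc_challenge_25", split="latest")

# Aggregated metrics for the whole run (the "results" configuration mentioned above)
results = load_dataset(repo, "results", split="latest")

print(arc_details)
print(results)
```

Each per-task configuration keeps one timestamped split per run plus a `latest` alias (as the config listings for other details datasets further down in this dump show), so pinning a specific run simply means passing its timestamped split name instead of `latest`.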
[ -0.7250309586524963, -0.8384577631950378, 0.26500362157821655, 0.21900202333927155, -0.17779961228370667, -0.0656784176826477, 0.019996894523501396, -0.22243748605251312, 0.5484986901283264, -0.06035907194018364, -0.5083735585212708, -0.6573349237442017, -0.45660629868507385, 0.20067578554153442, -0.041234035044908524, 0.85797119140625, -0.2007213979959488, -0.13284388184547424, 0.11338698863983154, -0.03696044161915779, -0.2396179586648941, -0.35210302472114563, -0.515448808670044, -0.35180479288101196, 0.17584343254566193, 0.43558117747306824, 0.43141746520996094, 0.8150927424430847, 0.6732242107391357, 0.3020835518836975, -0.29481446743011475, 0.0019318965496495366, -0.17174266278743744, -0.33702218532562256, 0.41164326667785645, -0.357022762298584, -0.8420771956443787, 0.3122332692146301, 0.7567034959793091, 0.618181049823761, -0.09130416810512543, 0.2943456172943115, 0.05102713033556938, 0.5839052796363831, -0.3376683294773102, 0.08584325760602951, -0.2621172070503235, 0.23190483450889587, -0.21074707806110382, -0.2563135027885437, -0.27453476190567017, -0.26059678196907043, -0.11699511855840683, -0.8746283650398254, 0.2744850516319275, 0.3280317783355713, 1.5365331172943115, -0.12831829488277435, -0.2727958858013153, 0.09099434316158295, -0.09816214442253113, 1.0144180059432983, -0.8729686141014099, 0.3473454713821411, 0.751399040222168, 0.12291818112134933, -0.16695614159107208, -0.6060873866081238, -0.6399354338645935, 0.05832874774932861, -0.3867175877094269, 0.35888999700546265, -0.07368514686822891, -0.14187997579574585, 0.3805724084377289, 0.6984599232673645, -0.6692079901695251, 0.14804516732692719, -0.631733775138855, -0.15130732953548431, 1.0895843505859375, 0.30782610177993774, 0.09862285852432251, -0.3512800335884094, -0.7059988975524902, -0.638161838054657, -0.40953823924064636, 0.23579256236553192, 0.4316790699958801, 0.31020626425743103, -0.3915150463581085, 0.6796523928642273, -0.43268734216690063, 0.5341391563415527, 0.4112846851348877, 0.05779744312167168, 0.8747888207435608, -0.7014024257659912, -0.5453742742538452, -0.04438197240233421, 1.10282564163208, 0.5824567079544067, 0.06185639649629593, 0.2132040411233902, 0.02074134349822998, -0.09754817187786102, 0.002265861025080085, -0.8607593178749084, -0.32288166880607605, 0.17650246620178223, -0.38195767998695374, -0.45457780361175537, 0.33092841506004333, -0.8729937672615051, 0.15828892588615417, -0.04007776454091072, 0.43474674224853516, -0.49135980010032654, -0.1241694763302803, 0.24108357727527618, -0.3737339377403259, 0.8186283111572266, -0.15590324997901917, -0.7848780155181885, 0.4112061858177185, 0.5262851119041443, 0.7844058871269226, -0.09995449334383011, -0.4405212998390198, -0.12881802022457123, -0.10848066955804825, -0.28239965438842773, 0.5264919400215149, -0.20725278556346893, -0.4080291986465454, -0.3108550012111664, 0.2968080937862396, -0.25879302620887756, -0.3534546494483948, 0.7642338275909424, -0.22919121384620667, 0.20251929759979248, -0.41853922605514526, -0.6517537236213684, 0.10304093360900879, 0.37315940856933594, -0.4400497376918793, 1.2900853157043457, 0.21669937670230865, -0.8476101160049438, 0.4114849865436554, -0.6193989515304565, -0.15163809061050415, -0.009139077737927437, -0.03864143043756485, -0.7942724227905273, -0.28179696202278137, 0.1869300901889801, 0.40898481011390686, -0.11271372437477112, -0.09317892044782639, -0.3529723584651947, -0.3749256134033203, 0.3429739773273468, -0.2000383734703064, 1.2048208713531494, -0.02809140272438526, -0.7778188586235046, 
-0.09879877418279648, -1.2091515064239502, 0.349700927734375, 0.198943093419075, -0.36066415905952454, -0.16921433806419373, -0.47676363587379456, -0.039425816386938095, 0.15839044749736786, 0.31961920857429504, -0.7956114411354065, 0.29283827543258667, -0.36522969603538513, 0.1462765634059906, 1.2513132095336914, 0.03343357518315315, 0.12381906807422638, -0.5600679516792297, 0.4937132000923157, 0.19669091701507568, 0.1832907497882843, 0.39830178022384644, -0.5744296312332153, -0.8323053121566772, -0.49649572372436523, -0.08484277129173279, 0.6024841666221619, -0.17476610839366913, 1.0870921611785889, 0.08455944061279297, -0.8950247168540955, -0.45763471722602844, -0.1388702541589737, 0.4906648099422455, 0.7272211909294128, 0.6119765639305115, -0.04026397317647934, -0.6347141265869141, -1.0815447568893433, -0.2909398376941681, -0.15838254988193512, 0.154719278216362, 0.2135521024465561, 1.017371654510498, -0.2597695589065552, 0.5740920901298523, -1.0391172170639038, -0.2007441222667694, 0.15018032491207123, -0.06659312546253204, 0.8005406260490417, 0.7310636639595032, 0.5730723738670349, -0.6905422210693359, -0.53227299451828, 0.18732573091983795, -0.8796259760856628, -0.1023348718881607, 0.13747340440750122, -0.30431345105171204, 0.12448584288358688, 0.11861388385295868, -0.7152248024940491, 0.5249816179275513, 0.23819363117218018, -1.0505595207214355, 1.0769256353378296, -0.35168737173080444, 0.5926905274391174, -1.014482021331787, 0.16645652055740356, -0.04037085920572281, 0.028153149411082268, -0.5097926259040833, 0.041240956634283066, 0.06902631372213364, 0.44930732250213623, -0.46894145011901855, 0.7792510390281677, -0.7000197172164917, -0.07571028172969818, 0.41677790880203247, 0.12703266739845276, -0.14227451384067535, 0.3602132797241211, -0.2426171451807022, 0.8275541067123413, 0.7357518672943115, -0.4899919033050537, 0.5328976511955261, 0.4163818359375, -0.22972875833511353, 0.683232843875885, -0.5091276168823242, -0.2759648859500885, 0.3225334882736206, -0.04917008802294731, -0.8176977634429932, -0.5039980411529541, 0.08578644692897797, -0.5782963037490845, -0.09791530668735504, 0.36481478810310364, -0.2734881043434143, -0.7592375874519348, -0.940464437007904, 0.3421403467655182, 0.6935210227966309, -0.45594146847724915, -0.20442929863929749, 0.05837905406951904, 0.10234544426202774, -0.8387097120285034, -0.842361330986023, -0.4827476441860199, -0.2363918423652649, -0.7322459816932678, 0.3359014093875885, -0.30230003595352173, -0.29410621523857117, -0.08825935423374176, -0.2379688024520874, -0.3319706916809082, 0.017011070623993874, 0.16070249676704407, 0.6608836650848389, -0.4105311632156372, -0.31699296832084656, -0.2525051236152649, -0.19092214107513428, 0.23918132483959198, -0.11796415597200394, 0.38225576281547546, -0.4912276864051819, -0.4352273941040039, -0.44921910762786865, -0.010156780481338501, 0.6693887710571289, -0.06904756277799606, 0.7413033843040466, 0.43794843554496765, -0.2762088179588318, -0.04290851950645447, -0.26194310188293457, -0.26147547364234924, -0.5887128114700317, 0.3107246458530426, -0.4625159800052643, -1.0590723752975464, 0.7509580850601196, 0.4790138304233551, 0.04639088734984398, 1.1206424236297607, 0.5967420935630798, -0.2867138981819153, 0.9825522303581238, 0.0461319275200367, 0.31933149695396423, 0.3776337802410126, -0.6776336431503296, 0.13601510226726532, -0.89864182472229, -0.33192014694213867, -0.5561926960945129, -0.44595083594322205, -0.7317463755607605, -0.09750338643789291, 0.2820478677749634, 0.16229625046253204, -0.6841951608657837, 
0.5485203862190247, -0.8416715860366821, 0.565873920917511, 0.5680723190307617, 0.2593405246734619, 0.18284370005130768, -0.16452708840370178, -0.3273538053035736, -0.13386066257953644, -0.46190884709358215, -0.25307613611221313, 1.1969842910766602, 0.25792327523231506, 0.7137659788131714, 0.09257823973894119, 0.907896101474762, 0.12570121884346008, -0.08924107998609543, -0.5858661532402039, 0.6574344635009766, 0.09971600770950317, -0.758414626121521, -0.3775175213813782, -0.507029116153717, -1.0553388595581055, 0.37039557099342346, -0.07510408014059067, -0.8784271478652954, 0.068620964884758, 0.05183111131191254, -0.18584075570106506, 0.501018762588501, -0.5692678689956665, 0.8443268537521362, -0.16321679949760437, -0.463177353143692, 0.08991291373968124, -0.8044901490211487, 0.4798915386199951, 0.1904679387807846, 0.23444068431854248, 0.06720622628927231, 0.2738955318927765, 1.1898150444030762, -0.8263546228408813, 0.4399718642234802, 0.06776200234889984, 0.03278276324272156, 0.3483528196811676, -0.1826627403497696, 0.5112485289573669, 0.08623024821281433, -0.005768992006778717, -0.08406811207532883, 0.2977670729160309, -0.8299148678779602, -0.07234803587198257, 0.9218860268592834, -0.9887869358062744, -0.6323260068893433, -0.9029138684272766, -0.5289150476455688, 0.0981200784444809, 0.5287163853645325, 0.3733932673931122, 0.49206751585006714, 0.00028877536533400416, 0.4397772252559662, 0.8562093377113342, -0.1386355310678482, 0.6149365901947021, 0.20424611866474152, 0.1121860146522522, -0.6385335922241211, 0.8451650142669678, 0.09794451296329498, 0.36114925146102905, 0.2643558979034424, 0.36776673793792725, -0.5472924709320068, -0.22426384687423706, -0.21921372413635254, 0.48080047965049744, -0.6087947487831116, -0.25343042612075806, -0.36582258343696594, -0.43355175852775574, -0.720855712890625, -0.6024425625801086, -0.33632516860961914, -0.5008864402770996, -0.46004703640937805, -0.4978615641593933, 0.5939691662788391, 0.489719033241272, -0.40076944231987, 0.07927574962377548, -0.5117875337600708, 0.24388249218463898, 0.31660306453704834, 0.5332404375076294, -0.35944193601608276, -0.5656526684761047, 0.02522900141775608, -0.12224341183900833, -0.547844648361206, -0.9488386511802673, 0.34443339705467224, -0.032535113394260406, 0.5267403721809387, 0.5729771256446838, 0.07710398733615875, 0.8333536982536316, -0.19740895926952362, 1.0497146844863892, 0.29982489347457886, -0.822847306728363, 0.7189233899116516, -0.3245003819465637, 0.14987820386886597, 0.6058173775672913, 0.1826653629541397, -0.22083164751529694, -0.6163834929466248, -1.2732830047607422, -0.8041582107543945, 0.6854072213172913, 0.40501508116722107, -0.26530537009239197, 0.07874157279729843, 0.16448946297168732, -0.26228034496307373, -0.17638620734214783, -0.7218154072761536, -0.896436333656311, -0.16430971026420593, -0.49528759717941284, 0.12527026236057281, 0.007174125872552395, -0.3933422565460205, -0.7686139345169067, 0.9375242590904236, 0.0044742668978869915, 0.6091815829277039, 0.4595828354358673, 0.09397874027490616, 0.09339532256126404, 0.5001910328865051, 0.8826695680618286, 0.7216609120368958, -0.45576152205467224, 0.43704891204833984, 0.41498205065727234, -1.038769245147705, 0.4793144166469574, 0.35082247853279114, -0.08071621507406235, -0.0400591716170311, 0.46139776706695557, 0.4332304894924164, 0.012289872393012047, -0.18801110982894897, 0.6067873239517212, -0.00823215488344431, -0.5441049933433533, -0.39823204278945923, 0.11387190222740173, -0.09986354410648346, 0.04046669974923134, 0.39663371443748474, 
-0.1703668087720871, -0.03690781816840172, -0.47595134377479553, 0.49905532598495483, 0.35748133063316345, -0.44400495290756226, -0.1848611980676651, 0.7287923097610474, -0.20743447542190552, -0.13814741373062134, 0.344876229763031, -0.20697705447673798, -0.6406880021095276, 1.1291834115982056, 0.6057460904121399, 0.6895597577095032, -0.24169482290744781, -0.03748217225074768, 0.9096269011497498, 0.36075010895729065, -0.06961502134799957, 0.5116990208625793, 0.30132296681404114, -0.25964343547821045, 0.14307698607444763, -0.8519088625907898, -0.0439436249434948, 0.15700490772724152, -0.812509298324585, 0.2989535629749298, -0.5222490429878235, -0.14869454503059387, -0.013355635106563568, 0.463926762342453, -0.44455376267433167, 0.572432279586792, -0.41449832916259766, 1.1807161569595337, -0.9590610861778259, 0.7202745079994202, 0.7340178489685059, -0.5389214754104614, -1.07215416431427, -0.48585590720176697, 0.029897665604948997, -0.8034133911132812, 0.6098300218582153, -0.024123042821884155, 0.1418485939502716, -0.06993154436349869, -0.7616963982582092, -0.904586136341095, 1.4132070541381836, -0.04295320808887482, -0.4663141071796417, 0.24327191710472107, -0.04081727936863899, 0.45132461190223694, 0.15591548383235931, 0.5859191417694092, 0.7155734300613403, 0.7976967692375183, -0.09162624925374985, -0.7549501061439514, 0.36266133189201355, -0.4919753670692444, -0.3620606064796448, 0.4390527307987213, -0.9684833288192749, 1.2114810943603516, -0.011986699886620045, 0.19395481050014496, -0.15853622555732727, 0.6701090335845947, 0.8178242444992065, 0.32460638880729675, 0.34361493587493896, 0.8908132314682007, 0.8532417416572571, -0.5033841729164124, 1.0130443572998047, -0.20983824133872986, 0.880878210067749, 0.7156526446342468, 0.228954017162323, 0.7415173649787903, 0.660916805267334, -0.5750014185905457, 0.5150113105773926, 0.818186342716217, -0.3202654719352722, 0.41367143392562866, 0.27127721905708313, -0.17283326387405396, -0.12286080420017242, 0.39938199520111084, -0.8873936533927917, 0.10333457589149475, 0.05902312695980072, -0.3284139037132263, 0.09670095890760422, -0.5036485195159912, 0.3197932541370392, -0.10879842191934586, -0.02671467699110508, 0.34858012199401855, 0.08276104927062988, -0.4220322370529175, 0.870227038860321, -0.13661208748817444, 0.7622222900390625, -0.5475121736526489, -0.09411508589982986, -0.3929678201675415, 0.6094996929168701, -0.4499213993549347, -1.076979637145996, 0.17924579977989197, 0.058777663856744766, -0.10648763924837112, -0.16265134513378143, 0.7106562256813049, -0.20065589249134064, -0.7408562898635864, 0.1655603051185608, 0.07941894233226776, 0.11667068302631378, 0.5041341781616211, -0.6463366746902466, -0.3504769206047058, -0.007370983250439167, -0.5718717575073242, 0.12730200588703156, 0.3270470201969147, 0.2626945674419403, 0.5428704023361206, 0.6385912895202637, 0.17611025273799896, 0.41666731238365173, -0.562588095664978, 0.8008943796157837, -1.0486842393875122, -0.7224307656288147, -0.9412881731987, 0.442634254693985, -0.32978829741477966, -0.8376794457435608, 1.008986234664917, 1.0239444971084595, 0.9238398671150208, 0.01580224744975567, 0.658718466758728, -0.39341503381729126, 0.2290240377187729, -0.389884889125824, 0.9435304403305054, -0.8499107956886292, -0.19691310822963715, -0.2587892711162567, -0.7079907059669495, -0.35493069887161255, 0.8199213743209839, -0.17006753385066986, 0.06471090763807297, 1.0231397151947021, 0.6729491949081421, -0.12077063322067261, 0.05315430089831352, -0.05484699457883835, 0.5727947354316711, 
0.3790859580039978, 0.9964702129364014, 0.6604653596878052, -0.7891599535942078, 0.34671807289123535, -0.49922922253608704, -0.4036366045475006, -0.3741743564605713, -0.4553237557411194, -0.8488442897796631, -0.4797288179397583, -0.2409394532442093, -0.5994923114776611, -0.09853872656822205, 0.9970782995223999, 0.4805019497871399, -0.9094943404197693, -0.3970107436180115, -0.1280430108308792, 0.12527413666248322, -0.5678335428237915, -0.4118396043777466, 0.7657878994941711, -0.08187229186296463, -0.549926221370697, 0.18044239282608032, -0.14040899276733398, 0.20881438255310059, 0.1167750209569931, -0.4322260916233063, -0.7175820469856262, -0.005172214470803738, 0.40878111124038696, 0.30914056301116943, -0.6696144938468933, -0.7150379419326782, 0.2831403315067291, -0.5000683665275574, 0.44509467482566833, -0.027422703802585602, -0.47881758213043213, 0.026894250884652138, 0.6884031295776367, 0.4915948808193207, 0.6928786635398865, -0.018821200355887413, 0.09013285487890244, -0.6616897583007812, 0.16826893389225006, -0.02647896111011505, 0.27498212456703186, -0.02273930422961712, -0.3336585462093353, 0.7965355515480042, 0.6861627697944641, -0.5670035481452942, -1.0212137699127197, -0.410414457321167, -1.4505939483642578, -0.006901133805513382, 1.1055809259414673, -0.05064291134476662, -0.48424583673477173, 0.2697695791721344, -0.1643928438425064, 0.25042060017585754, -0.32880768179893494, 0.7673773169517517, 0.782490074634552, -0.38135695457458496, 0.08164022117853165, -0.6708600521087646, 0.38886865973472595, 0.5383744835853577, -1.211380124092102, -0.08354463428258896, 0.2539016306400299, 0.32328349351882935, 0.32106050848960876, 0.6435789465904236, -0.06425464898347855, 0.23990106582641602, 0.25185900926589966, 0.036559320986270905, -0.003779538907110691, 0.07708147168159485, -0.2153119444847107, 0.0852680504322052, -0.23703241348266602, -0.4890170097351074 ]
open-llm-leaderboard/details_Kunhao__pile-7b
open-llm-leaderboard
2023-08-27T12:40:00Z
201
0
[ "region:us" ]
null
2023-08-18T18:39:47Z
--- pretty_name: Evaluation run of Kunhao/pile-7b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Kunhao/pile-7b](https://huggingface.co/Kunhao/pile-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kunhao__pile-7b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-17T14:02:00.215909](https://huggingface.co/datasets/open-llm-leaderboard/details_Kunhao__pile-7b/blob/main/results_2023-08-17T14%3A02%3A00.215909.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26607314141949256,\n\ \ \"acc_stderr\": 0.031950603341667064,\n \"acc_norm\": 0.2676071883857905,\n\ \ \"acc_norm_stderr\": 0.03196207703098002,\n \"mc1\": 0.23378212974296206,\n\ \ \"mc1_stderr\": 0.014816195991931572,\n \"mc2\": 0.4240744665255174,\n\ \ \"mc2_stderr\": 0.014948776413812296\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.2380546075085324,\n \"acc_stderr\": 0.012445770028026203,\n\ \ \"acc_norm\": 0.26791808873720135,\n \"acc_norm_stderr\": 0.01294203019513643\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3269269069906393,\n\ \ \"acc_stderr\": 0.004681316064444439,\n \"acc_norm\": 0.3875721967735511,\n\ \ \"acc_norm_stderr\": 0.004862003566798543\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.23703703703703705,\n\ \ \"acc_stderr\": 0.03673731683969506,\n \"acc_norm\": 0.23703703703703705,\n\ \ \"acc_norm_stderr\": 0.03673731683969506\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.035834961763610625,\n\ \ \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.035834961763610625\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n\ \ \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \ \ \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.21132075471698114,\n \"acc_stderr\": 0.025125766484827842,\n\ \ \"acc_norm\": 0.21132075471698114,\n \"acc_norm_stderr\": 0.025125766484827842\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.20833333333333334,\n\ \ \"acc_stderr\": 0.033961162058453336,\n \"acc_norm\": 0.20833333333333334,\n\ \ \"acc_norm_stderr\": 0.033961162058453336\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 
0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n\ \ \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n\ \ \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n\ \ \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n\ \ \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \"acc_norm\": 0.22,\n\ \ \"acc_norm_stderr\": 0.0416333199893227\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.02767845257821239,\n\ \ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.02767845257821239\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.25517241379310346,\n \"acc_stderr\": 0.03632984052707841,\n\ \ \"acc_norm\": 0.25517241379310346,\n \"acc_norm_stderr\": 0.03632984052707841\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.25132275132275134,\n \"acc_stderr\": 0.022340482339643898,\n \"\ acc_norm\": 0.25132275132275134,\n \"acc_norm_stderr\": 0.022340482339643898\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.20634920634920634,\n\ \ \"acc_stderr\": 0.036196045241242515,\n \"acc_norm\": 0.20634920634920634,\n\ \ \"acc_norm_stderr\": 0.036196045241242515\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.27741935483870966,\n\ \ \"acc_stderr\": 0.025470196835900055,\n \"acc_norm\": 0.27741935483870966,\n\ \ \"acc_norm_stderr\": 0.025470196835900055\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.24630541871921183,\n \"acc_stderr\": 0.030315099285617722,\n\ \ \"acc_norm\": 0.24630541871921183,\n \"acc_norm_stderr\": 0.030315099285617722\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\"\ : 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.2545454545454545,\n \"acc_stderr\": 0.0340150671524904,\n\ \ \"acc_norm\": 0.2545454545454545,\n \"acc_norm_stderr\": 0.0340150671524904\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.35858585858585856,\n \"acc_stderr\": 0.03416903640391521,\n \"\ acc_norm\": 0.35858585858585856,\n \"acc_norm_stderr\": 0.03416903640391521\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.36787564766839376,\n 
\"acc_stderr\": 0.03480175668466036,\n\ \ \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.31025641025641026,\n \"acc_stderr\": 0.023454674889404288,\n\ \ \"acc_norm\": 0.31025641025641026,\n \"acc_norm_stderr\": 0.023454674889404288\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948492,\n \ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948492\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.2184873949579832,\n \"acc_stderr\": 0.026841514322958948,\n\ \ \"acc_norm\": 0.2184873949579832,\n \"acc_norm_stderr\": 0.026841514322958948\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.271523178807947,\n \"acc_stderr\": 0.036313298039696525,\n \"\ acc_norm\": 0.271523178807947,\n \"acc_norm_stderr\": 0.036313298039696525\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.25871559633027524,\n \"acc_stderr\": 0.018776052319619624,\n \"\ acc_norm\": 0.25871559633027524,\n \"acc_norm_stderr\": 0.018776052319619624\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.46296296296296297,\n \"acc_stderr\": 0.03400603625538272,\n \"\ acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.03400603625538272\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.27941176470588236,\n \"acc_stderr\": 0.031493281045079556,\n \"\ acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.031493281045079556\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.2109704641350211,\n \"acc_stderr\": 0.026558372502661923,\n \ \ \"acc_norm\": 0.2109704641350211,\n \"acc_norm_stderr\": 0.026558372502661923\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.28699551569506726,\n\ \ \"acc_stderr\": 0.030360379710291954,\n \"acc_norm\": 0.28699551569506726,\n\ \ \"acc_norm_stderr\": 0.030360379710291954\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306085,\n\ \ \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306085\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.256198347107438,\n \"acc_stderr\": 0.039849796533028725,\n \"\ acc_norm\": 0.256198347107438,\n \"acc_norm_stderr\": 0.039849796533028725\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.03291099578615769,\n\ \ \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.03291099578615769\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.042878587513404565,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.042878587513404565\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.1794871794871795,\n\ \ \"acc_stderr\": 0.02514093595033545,\n \"acc_norm\": 0.1794871794871795,\n\ \ 
\"acc_norm_stderr\": 0.02514093595033545\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.24648786717752236,\n\ \ \"acc_stderr\": 0.015411308769686941,\n \"acc_norm\": 0.24648786717752236,\n\ \ \"acc_norm_stderr\": 0.015411308769686941\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.25722543352601157,\n \"acc_stderr\": 0.02353292543104428,\n\ \ \"acc_norm\": 0.25722543352601157,\n \"acc_norm_stderr\": 0.02353292543104428\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23687150837988827,\n\ \ \"acc_stderr\": 0.01421957078810398,\n \"acc_norm\": 0.23687150837988827,\n\ \ \"acc_norm_stderr\": 0.01421957078810398\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.02495418432487991,\n\ \ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.02495418432487991\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.24115755627009647,\n\ \ \"acc_stderr\": 0.024296594034763426,\n \"acc_norm\": 0.24115755627009647,\n\ \ \"acc_norm_stderr\": 0.024296594034763426\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.023788583551658544,\n\ \ \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.023788583551658544\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290413,\n \ \ \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290413\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24902216427640156,\n\ \ \"acc_stderr\": 0.011044892264040772,\n \"acc_norm\": 0.24902216427640156,\n\ \ \"acc_norm_stderr\": 0.011044892264040772\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4227941176470588,\n \"acc_stderr\": 0.03000856284500347,\n\ \ \"acc_norm\": 0.4227941176470588,\n \"acc_norm_stderr\": 0.03000856284500347\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.24673202614379086,\n \"acc_stderr\": 0.0174408203674025,\n \ \ \"acc_norm\": 0.24673202614379086,\n \"acc_norm_stderr\": 0.0174408203674025\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.19090909090909092,\n\ \ \"acc_stderr\": 0.03764425585984924,\n \"acc_norm\": 0.19090909090909092,\n\ \ \"acc_norm_stderr\": 0.03764425585984924\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.3877551020408163,\n \"acc_stderr\": 0.031192230726795656,\n\ \ \"acc_norm\": 0.3877551020408163,\n \"acc_norm_stderr\": 0.031192230726795656\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.25870646766169153,\n\ \ \"acc_stderr\": 0.030965903123573026,\n \"acc_norm\": 0.25870646766169153,\n\ \ \"acc_norm_stderr\": 0.030965903123573026\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2891566265060241,\n\ \ \"acc_stderr\": 0.03529486801511115,\n \"acc_norm\": 0.2891566265060241,\n\ \ \"acc_norm_stderr\": 0.03529486801511115\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.2573099415204678,\n \"acc_stderr\": 0.03352799844161865,\n\ \ \"acc_norm\": 0.2573099415204678,\n 
\"acc_norm_stderr\": 0.03352799844161865\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23378212974296206,\n\ \ \"mc1_stderr\": 0.014816195991931572,\n \"mc2\": 0.4240744665255174,\n\ \ \"mc2_stderr\": 0.014948776413812296\n }\n}\n```" repo_url: https://huggingface.co/Kunhao/pile-7b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|arc:challenge|25_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hellaswag|10_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_17T14_02_00.215909 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T14:02:00.215909.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T14:02:00.215909.parquet' - config_name: results data_files: - split: 2023_08_17T14_02_00.215909 path: - results_2023-08-17T14:02:00.215909.parquet - split: latest path: - results_2023-08-17T14:02:00.215909.parquet --- # Dataset Card for Evaluation run of Kunhao/pile-7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Kunhao/pile-7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Kunhao/pile-7b](https://huggingface.co/Kunhao/pile-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
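For instance, those aggregated metrics can be read back directly from the "results" configuration. The sketch below assumes the same `datasets` loading API used in the example that follows; the configuration and split names are taken from the `configs` section of this card:

```python
from datasets import load_dataset

# Load the aggregated "results" configuration of this evaluation dataset.
# The "latest" split points to the most recent run (here 2023-08-17T14:02:00).
results = load_dataset(
    "open-llm-leaderboard/details_Kunhao__pile-7b",
    "results",
    split="latest",
)

# Inspect the aggregated metrics (acc, acc_norm, mc1, mc2, ...) stored for this run.
print(results[0])
```

The per-task configurations (for example `harness_hendrycksTest_world_religions_5` or `harness_truthfulqa_mc_0`) can be loaded the same way by swapping in the configuration name.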
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kunhao__pile-7b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T14:02:00.215909](https://huggingface.co/datasets/open-llm-leaderboard/details_Kunhao__pile-7b/blob/main/results_2023-08-17T14%3A02%3A00.215909.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26607314141949256, "acc_stderr": 0.031950603341667064, "acc_norm": 0.2676071883857905, "acc_norm_stderr": 0.03196207703098002, "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931572, "mc2": 0.4240744665255174, "mc2_stderr": 0.014948776413812296 }, "harness|arc:challenge|25": { "acc": 0.2380546075085324, "acc_stderr": 0.012445770028026203, "acc_norm": 0.26791808873720135, "acc_norm_stderr": 0.01294203019513643 }, "harness|hellaswag|10": { "acc": 0.3269269069906393, "acc_stderr": 0.004681316064444439, "acc_norm": 0.3875721967735511, "acc_norm_stderr": 0.004862003566798543 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.23703703703703705, "acc_stderr": 0.03673731683969506, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2631578947368421, "acc_stderr": 0.035834961763610625, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.035834961763610625 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21132075471698114, "acc_stderr": 0.025125766484827842, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.025125766484827842 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.20833333333333334, "acc_stderr": 0.033961162058453336, "acc_norm": 0.20833333333333334, "acc_norm_stderr": 0.033961162058453336 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.23404255319148937, "acc_stderr": 0.02767845257821239, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.02767845257821239 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 
0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.25517241379310346, "acc_stderr": 0.03632984052707841, "acc_norm": 0.25517241379310346, "acc_norm_stderr": 0.03632984052707841 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25132275132275134, "acc_stderr": 0.022340482339643898, "acc_norm": 0.25132275132275134, "acc_norm_stderr": 0.022340482339643898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.20634920634920634, "acc_stderr": 0.036196045241242515, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.036196045241242515 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.27741935483870966, "acc_stderr": 0.025470196835900055, "acc_norm": 0.27741935483870966, "acc_norm_stderr": 0.025470196835900055 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.24630541871921183, "acc_stderr": 0.030315099285617722, "acc_norm": 0.24630541871921183, "acc_norm_stderr": 0.030315099285617722 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.0340150671524904, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.0340150671524904 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.35858585858585856, "acc_stderr": 0.03416903640391521, "acc_norm": 0.35858585858585856, "acc_norm_stderr": 0.03416903640391521 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.31025641025641026, "acc_stderr": 0.023454674889404288, "acc_norm": 0.31025641025641026, "acc_norm_stderr": 0.023454674889404288 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948492, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948492 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2184873949579832, "acc_stderr": 0.026841514322958948, "acc_norm": 0.2184873949579832, "acc_norm_stderr": 0.026841514322958948 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.036313298039696525, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.036313298039696525 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.25871559633027524, "acc_stderr": 0.018776052319619624, "acc_norm": 0.25871559633027524, "acc_norm_stderr": 0.018776052319619624 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.03400603625538272, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.03400603625538272 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27941176470588236, "acc_stderr": 0.031493281045079556, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.031493281045079556 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2109704641350211, "acc_stderr": 0.026558372502661923, "acc_norm": 0.2109704641350211, "acc_norm_stderr": 0.026558372502661923 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.28699551569506726, 
"acc_stderr": 0.030360379710291954, "acc_norm": 0.28699551569506726, "acc_norm_stderr": 0.030360379710291954 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306085, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.256198347107438, "acc_stderr": 0.039849796533028725, "acc_norm": 0.256198347107438, "acc_norm_stderr": 0.039849796533028725 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2222222222222222, "acc_stderr": 0.040191074725573483, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.03291099578615769, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.042878587513404565, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404565 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.1794871794871795, "acc_stderr": 0.02514093595033545, "acc_norm": 0.1794871794871795, "acc_norm_stderr": 0.02514093595033545 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.24648786717752236, "acc_stderr": 0.015411308769686941, "acc_norm": 0.24648786717752236, "acc_norm_stderr": 0.015411308769686941 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.25722543352601157, "acc_stderr": 0.02353292543104428, "acc_norm": 0.25722543352601157, "acc_norm_stderr": 0.02353292543104428 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23687150837988827, "acc_stderr": 0.01421957078810398, "acc_norm": 0.23687150837988827, "acc_norm_stderr": 0.01421957078810398 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2549019607843137, "acc_stderr": 0.02495418432487991, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.02495418432487991 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.24115755627009647, "acc_stderr": 0.024296594034763426, "acc_norm": 0.24115755627009647, "acc_norm_stderr": 0.024296594034763426 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.24074074074074073, "acc_stderr": 0.023788583551658544, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.023788583551658544 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290413, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290413 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24902216427640156, "acc_stderr": 0.011044892264040772, "acc_norm": 0.24902216427640156, "acc_norm_stderr": 0.011044892264040772 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4227941176470588, "acc_stderr": 0.03000856284500347, "acc_norm": 0.4227941176470588, "acc_norm_stderr": 0.03000856284500347 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24673202614379086, "acc_stderr": 0.0174408203674025, "acc_norm": 0.24673202614379086, "acc_norm_stderr": 0.0174408203674025 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.19090909090909092, "acc_stderr": 0.03764425585984924, "acc_norm": 0.19090909090909092, "acc_norm_stderr": 
0.03764425585984924 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3877551020408163, "acc_stderr": 0.031192230726795656, "acc_norm": 0.3877551020408163, "acc_norm_stderr": 0.031192230726795656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.25870646766169153, "acc_stderr": 0.030965903123573026, "acc_norm": 0.25870646766169153, "acc_norm_stderr": 0.030965903123573026 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2573099415204678, "acc_stderr": 0.03352799844161865, "acc_norm": 0.2573099415204678, "acc_norm_stderr": 0.03352799844161865 }, "harness|truthfulqa:mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931572, "mc2": 0.4240744665255174, "mc2_stderr": 0.014948776413812296 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna
open-llm-leaderboard
2023-08-27T12:40:18Z
201
0
[ "region:us" ]
null
2023-08-18T18:41:32Z
--- pretty_name: Evaluation run of grimpep/llama2-22b-wizard_vicuna dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [grimpep/llama2-22b-wizard_vicuna](https://huggingface.co/grimpep/llama2-22b-wizard_vicuna)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-17T14:12:20.144901](https://huggingface.co/datasets/open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna/blob/main/results_2023-08-17T14%3A12%3A20.144901.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5471453447112705,\n\ \ \"acc_stderr\": 0.034452241209601206,\n \"acc_norm\": 0.550874294679223,\n\ \ \"acc_norm_stderr\": 0.03443332656790291,\n \"mc1\": 0.31334149326805383,\n\ \ \"mc1_stderr\": 0.0162380650690596,\n \"mc2\": 0.4692973392633332,\n\ \ \"mc2_stderr\": 0.0156700439246235\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.560580204778157,\n \"acc_stderr\": 0.014503747823580123,\n\ \ \"acc_norm\": 0.5895904436860068,\n \"acc_norm_stderr\": 0.014374922192642662\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6290579565823541,\n\ \ \"acc_stderr\": 0.004820697457420421,\n \"acc_norm\": 0.8200557657837084,\n\ \ \"acc_norm_stderr\": 0.003833559228158675\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411022,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411022\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n\ \ \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n\ \ \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296564,\n\ \ \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296564\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n\ \ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.03024223380085449,\n\ \ \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.03024223380085449\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6319444444444444,\n\ \ \"acc_stderr\": 0.04032999053960718,\n \"acc_norm\": 0.6319444444444444,\n\ \ \"acc_norm_stderr\": 
0.04032999053960718\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n\ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.48554913294797686,\n\ \ \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.48554913294797686,\n\ \ \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n\ \ \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n\ \ \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4425531914893617,\n \"acc_stderr\": 0.03246956919789958,\n\ \ \"acc_norm\": 0.4425531914893617,\n \"acc_norm_stderr\": 0.03246956919789958\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n\ \ \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n\ \ \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n\ \ \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3306878306878307,\n \"acc_stderr\": 0.024229965298425082,\n \"\ acc_norm\": 0.3306878306878307,\n \"acc_norm_stderr\": 0.024229965298425082\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\ \ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\ \ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6258064516129033,\n\ \ \"acc_stderr\": 0.027528904299845697,\n \"acc_norm\": 0.6258064516129033,\n\ \ \"acc_norm_stderr\": 0.027528904299845697\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162934,\n\ \ \"acc_norm\": 0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162934\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\"\ : 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n\ \ \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6818181818181818,\n \"acc_stderr\": 0.03318477333845331,\n \"\ acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.03318477333845331\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7927461139896373,\n \"acc_stderr\": 0.02925282329180363,\n\ \ \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.02925282329180363\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5307692307692308,\n \"acc_stderr\": 0.025302958890850154,\n\ \ \"acc_norm\": 0.5307692307692308,\n \"acc_norm_stderr\": 0.025302958890850154\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085622,\n \ \ \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085622\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.032252942323996406,\n\ \ \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.032252942323996406\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"\ acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.728440366972477,\n \"acc_stderr\": 0.01906909836319144,\n \"acc_norm\"\ : 0.728440366972477,\n \"acc_norm_stderr\": 0.01906909836319144\n },\n\ \ \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.41203703703703703,\n\ \ \"acc_stderr\": 0.03356787758160835,\n \"acc_norm\": 0.41203703703703703,\n\ \ \"acc_norm_stderr\": 0.03356787758160835\n },\n \"harness|hendrycksTest-high_school_us_history|5\"\ : {\n \"acc\": 0.7696078431372549,\n \"acc_stderr\": 0.02955429260569507,\n\ \ \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.02955429260569507\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \ \ \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n\ \ \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n\ \ \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6030534351145038,\n \"acc_stderr\": 0.04291135671009224,\n\ \ \"acc_norm\": 0.6030534351145038,\n \"acc_norm_stderr\": 0.04291135671009224\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7355371900826446,\n \"acc_stderr\": 0.04026187527591205,\n \"\ acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.04026187527591205\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n\ \ \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n\ \ \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.03680350371286461,\n\ \ \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.03680350371286461\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n\ \ \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n\ \ \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n\ \ \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n\ \ \"acc_stderr\": 
0.02777883590493543,\n \"acc_norm\": 0.7649572649572649,\n\ \ \"acc_norm_stderr\": 0.02777883590493543\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \ \ \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7598978288633461,\n\ \ \"acc_stderr\": 0.015274685213734195,\n \"acc_norm\": 0.7598978288633461,\n\ \ \"acc_norm_stderr\": 0.015274685213734195\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6213872832369942,\n \"acc_stderr\": 0.026113749361310345,\n\ \ \"acc_norm\": 0.6213872832369942,\n \"acc_norm_stderr\": 0.026113749361310345\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3564245810055866,\n\ \ \"acc_stderr\": 0.016018239710513405,\n \"acc_norm\": 0.3564245810055866,\n\ \ \"acc_norm_stderr\": 0.016018239710513405\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.027826109307283693,\n\ \ \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.027826109307283693\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n\ \ \"acc_stderr\": 0.027368078243971646,\n \"acc_norm\": 0.6334405144694534,\n\ \ \"acc_norm_stderr\": 0.027368078243971646\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6327160493827161,\n \"acc_stderr\": 0.0268228017595079,\n\ \ \"acc_norm\": 0.6327160493827161,\n \"acc_norm_stderr\": 0.0268228017595079\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4148936170212766,\n \"acc_stderr\": 0.029392236584612493,\n \ \ \"acc_norm\": 0.4148936170212766,\n \"acc_norm_stderr\": 0.029392236584612493\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.39960886571056065,\n\ \ \"acc_stderr\": 0.012510181636960672,\n \"acc_norm\": 0.39960886571056065,\n\ \ \"acc_norm_stderr\": 0.012510181636960672\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.030372836961539352,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.030372836961539352\n \ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\"\ : 0.5522875816993464,\n \"acc_stderr\": 0.020116925347422425,\n \"\ acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.020116925347422425\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n\ \ \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n\ \ \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6448979591836734,\n \"acc_stderr\": 0.030635655150387634,\n\ \ \"acc_norm\": 0.6448979591836734,\n \"acc_norm_stderr\": 0.030635655150387634\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6069651741293532,\n\ \ \"acc_stderr\": 0.0345368246603156,\n \"acc_norm\": 0.6069651741293532,\n\ \ \"acc_norm_stderr\": 0.0345368246603156\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \ \ \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n\ \ \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n\ \ \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n\ \ \"acc_norm\": 
0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.31334149326805383,\n\ \ \"mc1_stderr\": 0.0162380650690596,\n \"mc2\": 0.4692973392633332,\n\ \ \"mc2_stderr\": 0.0156700439246235\n }\n}\n```" repo_url: https://huggingface.co/grimpep/llama2-22b-wizard_vicuna leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|arc:challenge|25_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hellaswag|10_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_17T14_12_20.144901 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T14:12:20.144901.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T14:12:20.144901.parquet' - config_name: results data_files: - split: 2023_08_17T14_12_20.144901 path: - results_2023-08-17T14:12:20.144901.parquet - split: latest path: - results_2023-08-17T14:12:20.144901.parquet --- # Dataset Card for Evaluation run of grimpep/llama2-22b-wizard_vicuna ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/grimpep/llama2-22b-wizard_vicuna - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [grimpep/llama2-22b-wizard_vicuna](https://huggingface.co/grimpep/llama2-22b-wizard_vicuna) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
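That aggregated "results" configuration can be read directly. Below is a minimal sketch; it assumes only that the `datasets` library used in the loading example further down is installed and that the run produced at least one aggregated row:

```python
from datasets import load_dataset

# The "latest" split of the "results" configuration always points to the
# most recent evaluation run for this model.
results = load_dataset(
    "open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna",
    "results",
    split="latest",
)

# Print the first (and typically only) row of aggregated accuracy / stderr values.
print(results[0])
```

The per-task details are loaded in the same way, as shown next.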
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T14:12:20.144901](https://huggingface.co/datasets/open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna/blob/main/results_2023-08-17T14%3A12%3A20.144901.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5471453447112705, "acc_stderr": 0.034452241209601206, "acc_norm": 0.550874294679223, "acc_norm_stderr": 0.03443332656790291, "mc1": 0.31334149326805383, "mc1_stderr": 0.0162380650690596, "mc2": 0.4692973392633332, "mc2_stderr": 0.0156700439246235 }, "harness|arc:challenge|25": { "acc": 0.560580204778157, "acc_stderr": 0.014503747823580123, "acc_norm": 0.5895904436860068, "acc_norm_stderr": 0.014374922192642662 }, "harness|hellaswag|10": { "acc": 0.6290579565823541, "acc_stderr": 0.004820697457420421, "acc_norm": 0.8200557657837084, "acc_norm_stderr": 0.003833559228158675 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411022, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411022 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296564, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296564 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.03024223380085449, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.03024223380085449 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6319444444444444, "acc_stderr": 0.04032999053960718, "acc_norm": 0.6319444444444444, "acc_norm_stderr": 0.04032999053960718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.48554913294797686, "acc_stderr": 0.03810871630454764, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4425531914893617, "acc_stderr": 0.03246956919789958, "acc_norm": 0.4425531914893617, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, 
"acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3306878306878307, "acc_stderr": 0.024229965298425082, "acc_norm": 0.3306878306878307, "acc_norm_stderr": 0.024229965298425082 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6258064516129033, "acc_stderr": 0.027528904299845697, "acc_norm": 0.6258064516129033, "acc_norm_stderr": 0.027528904299845697 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4433497536945813, "acc_stderr": 0.03495334582162934, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162934 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6818181818181818, "acc_stderr": 0.03318477333845331, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.03318477333845331 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7927461139896373, "acc_stderr": 0.02925282329180363, "acc_norm": 0.7927461139896373, "acc_norm_stderr": 0.02925282329180363 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5307692307692308, "acc_stderr": 0.025302958890850154, "acc_norm": 0.5307692307692308, "acc_norm_stderr": 0.025302958890850154 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085622, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5588235294117647, "acc_stderr": 0.032252942323996406, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.032252942323996406 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.728440366972477, "acc_stderr": 0.01906909836319144, "acc_norm": 0.728440366972477, "acc_norm_stderr": 0.01906909836319144 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7696078431372549, "acc_stderr": 0.02955429260569507, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.02955429260569507 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 
0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009224, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009224 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7355371900826446, "acc_stderr": 0.04026187527591205, "acc_norm": 0.7355371900826446, "acc_norm_stderr": 0.04026187527591205 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6748466257668712, "acc_stderr": 0.03680350371286461, "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.03680350371286461 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.02777883590493543, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.02777883590493543 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7598978288633461, "acc_stderr": 0.015274685213734195, "acc_norm": 0.7598978288633461, "acc_norm_stderr": 0.015274685213734195 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6213872832369942, "acc_stderr": 0.026113749361310345, "acc_norm": 0.6213872832369942, "acc_norm_stderr": 0.026113749361310345 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3564245810055866, "acc_stderr": 0.016018239710513405, "acc_norm": 0.3564245810055866, "acc_norm_stderr": 0.016018239710513405 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6176470588235294, "acc_stderr": 0.027826109307283693, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.027826109307283693 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.027368078243971646, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.027368078243971646 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.0268228017595079, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.0268228017595079 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4148936170212766, "acc_stderr": 0.029392236584612493, "acc_norm": 0.4148936170212766, "acc_norm_stderr": 0.029392236584612493 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.39960886571056065, "acc_stderr": 0.012510181636960672, "acc_norm": 0.39960886571056065, "acc_norm_stderr": 0.012510181636960672 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5, "acc_stderr": 0.030372836961539352, "acc_norm": 0.5, "acc_norm_stderr": 0.030372836961539352 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5522875816993464, "acc_stderr": 0.020116925347422425, "acc_norm": 0.5522875816993464, "acc_norm_stderr": 0.020116925347422425 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.046534298079135075 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.6448979591836734, "acc_stderr": 0.030635655150387634, "acc_norm": 0.6448979591836734, "acc_norm_stderr": 0.030635655150387634 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6069651741293532, "acc_stderr": 0.0345368246603156, "acc_norm": 0.6069651741293532, "acc_norm_stderr": 0.0345368246603156 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.31334149326805383, "mc1_stderr": 0.0162380650690596, "mc2": 0.4692973392633332, "mc2_stderr": 0.0156700439246235 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
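As a usage note on the "Latest results" block above: a single MMLU figure can be recovered by macro-averaging the `hendrycksTest` sub-task accuracies. The sketch below downloads the raw results file linked in that section with the standard `huggingface_hub` helper; the exact JSON layout of that file is an assumption (it may match the flat dict printed above, or nest it under a top-level "results" key), so the code falls back gracefully between the two.

```python
import json

from huggingface_hub import hf_hub_download

# Fetch the raw results file linked under "## Latest results" above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna",
    filename="results_2023-08-17T14:12:20.144901.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

# The card prints a flat {task: metrics} dict; fall back gracefully if the
# file nests those metrics under a "results" key instead.
scores = data.get("results", data)

# Macro-average accuracy over the MMLU (hendrycksTest) sub-tasks.
mmlu = [v["acc"] for k, v in scores.items() if k.startswith("harness|hendrycksTest-")]
print(f"MMLU macro-average acc over {len(mmlu)} sub-tasks: {sum(mmlu) / len(mmlu):.4f}")
```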
[ -0.7114003896713257, -0.8379528522491455, 0.28216394782066345, 0.2152474820613861, -0.15707914531230927, -0.05624080449342728, 0.03789100795984268, -0.23540858924388885, 0.5523028373718262, -0.030919447541236877, -0.4799768924713135, -0.6884092688560486, -0.45064425468444824, 0.2291705310344696, -0.04168999567627907, 0.8329642415046692, -0.14510607719421387, -0.1266094297170639, 0.08518525958061218, 0.002423500642180443, -0.28104162216186523, -0.3337656855583191, -0.5101895928382874, -0.3390519917011261, 0.17435574531555176, 0.4506075084209442, 0.45463836193084717, 0.7571467161178589, 0.6703042984008789, 0.29210391640663147, -0.291851669549942, 0.04210776090621948, -0.190028578042984, -0.2815934121608734, 0.40097343921661377, -0.3952031135559082, -0.8397026658058167, 0.29434457421302795, 0.7592909932136536, 0.6109696626663208, -0.07752397656440735, 0.303810715675354, 0.06308062374591827, 0.5861401557922363, -0.3515777885913849, 0.031598467379808426, -0.3128407597541809, 0.21961143612861633, -0.20288428664207458, -0.3349056541919708, -0.2948524057865143, -0.22519859671592712, -0.19872449338436127, -0.8768107891082764, 0.2984636425971985, 0.2920520603656769, 1.5621904134750366, -0.1015997976064682, -0.2678219974040985, 0.13244764506816864, -0.12920747697353363, 0.9914412498474121, -0.8851275444030762, 0.34750646352767944, 0.8089178204536438, 0.14409461617469788, -0.23506852984428406, -0.5722309350967407, -0.6259711384773254, 0.0930488258600235, -0.33276820182800293, 0.3530839681625366, -0.04740883782505989, -0.1503807157278061, 0.3511466085910797, 0.667283296585083, -0.6672725081443787, 0.17997011542320251, -0.6834612488746643, -0.17068952322006226, 1.0784456729888916, 0.3789726793766022, 0.05924353003501892, -0.363430380821228, -0.68726646900177, -0.6555851101875305, -0.3682398498058319, 0.2464047074317932, 0.4468553066253662, 0.33034977316856384, -0.40512537956237793, 0.73707115650177, -0.38806048035621643, 0.5810078382492065, 0.4359404146671295, -0.01646900735795498, 0.9012256860733032, -0.6508618593215942, -0.5293552279472351, -0.09809374809265137, 1.1129543781280518, 0.6053531169891357, 0.06010439991950989, 0.20276635885238647, 0.006694423966109753, -0.10003544390201569, 0.03647670894861221, -0.8630395531654358, -0.2831772565841675, 0.20320336520671844, -0.39085036516189575, -0.5060807466506958, 0.32451215386390686, -0.8963122367858887, 0.0740562230348587, -0.042979028075933456, 0.409412145614624, -0.4922088086605072, -0.10761405527591705, 0.27504849433898926, -0.4429682791233063, 0.8364319801330566, -0.14163127541542053, -0.7978692054748535, 0.4207705855369568, 0.5396363139152527, 0.7693581581115723, -0.11773362755775452, -0.44759202003479004, -0.11513717472553253, -0.11170557886362076, -0.2973005473613739, 0.5630598068237305, -0.27514079213142395, -0.41648346185684204, -0.28694120049476624, 0.32042789459228516, -0.251493364572525, -0.36509233713150024, 0.7007430195808411, -0.21292664110660553, 0.22839120030403137, -0.40025824308395386, -0.6563103199005127, 0.12327099591493607, 0.36321625113487244, -0.4010795056819916, 1.3332759141921997, 0.24882206320762634, -0.844447135925293, 0.41276121139526367, -0.5446010231971741, -0.11535941064357758, -0.039343107491731644, -0.07428814470767975, -0.7852068543434143, -0.24516062438488007, 0.14068686962127686, 0.40308162569999695, -0.17631733417510986, -0.10365060716867447, -0.3756633698940277, -0.3734770119190216, 0.33427053689956665, -0.22746597230434418, 1.2442967891693115, -0.038943883031606674, -0.7801849246025085, -0.15295284986495972, 
-1.2867158651351929, 0.30099907517433167, 0.22157175838947296, -0.3599179983139038, -0.18335047364234924, -0.46828120946884155, 0.004633753560483456, 0.18010593950748444, 0.32009533047676086, -0.776849627494812, 0.3102218508720398, -0.3556634187698364, 0.1008630245923996, 1.27349054813385, 0.03813505172729492, 0.13991665840148926, -0.5331974029541016, 0.5335506796836853, 0.15480318665504456, 0.22318579256534576, 0.4650335907936096, -0.6520181894302368, -0.7960378527641296, -0.49211999773979187, -0.07679201662540436, 0.6538568139076233, -0.18139299750328064, 1.139934778213501, 0.09473444521427155, -0.9196967482566833, -0.42906853556632996, -0.13314248621463776, 0.48202094435691833, 0.772559404373169, 0.5969931483268738, -0.05906490236520767, -0.5920701026916504, -1.1282258033752441, -0.2828015983104706, -0.1910157948732376, 0.10385482758283615, 0.20366492867469788, 0.9957736730575562, -0.28721556067466736, 0.6066890954971313, -1.0437827110290527, -0.19705936312675476, 0.20188948512077332, -0.09320475161075592, 0.806744396686554, 0.7174967527389526, 0.6172295212745667, -0.6539121270179749, -0.5194258689880371, 0.19027723371982574, -0.8713334798812866, -0.1291077882051468, 0.11801053583621979, -0.33358141779899597, 0.09539639949798584, 0.13248518109321594, -0.6955659985542297, 0.5505114197731018, 0.23612898588180542, -1.118861436843872, 1.090200424194336, -0.33498653769493103, 0.6010541319847107, -1.0165067911148071, 0.17673659324645996, -0.09545943140983582, 0.018870165571570396, -0.5076700448989868, 0.010897604748606682, 0.08334283530712128, 0.4543910324573517, -0.4788667857646942, 0.8392589688301086, -0.6753811836242676, -0.07771595567464828, 0.4034130871295929, 0.1337224841117859, -0.09945067763328552, 0.36923947930336, -0.24602970480918884, 0.8200600743293762, 0.7492159008979797, -0.4777655601501465, 0.5308180451393127, 0.4041087031364441, -0.17514845728874207, 0.7123879194259644, -0.47583284974098206, -0.31348058581352234, 0.3056642711162567, -0.06908131390810013, -0.8480957746505737, -0.4590925872325897, 0.07844522595405579, -0.6028043031692505, -0.07904145866632462, 0.36429744958877563, -0.2973405420780182, -0.7785016894340515, -0.9468832015991211, 0.30788350105285645, 0.6949988603591919, -0.4061281681060791, -0.1730635166168213, 0.062562957406044, 0.09870906174182892, -0.838442325592041, -0.8345644474029541, -0.4624432325363159, -0.2330077439546585, -0.7082969546318054, 0.32473865151405334, -0.2741779088973999, -0.2997925281524658, -0.12991701066493988, -0.2320484071969986, -0.3267240524291992, 0.016449179500341415, 0.13948503136634827, 0.6656916737556458, -0.3634631931781769, -0.27297383546829224, -0.23411041498184204, -0.18694432079792023, 0.23139159381389618, -0.08583367615938187, 0.4096147418022156, -0.4496241509914398, -0.39530593156814575, -0.4270377457141876, -0.026607120409607887, 0.6949499249458313, -0.041221439838409424, 0.74101722240448, 0.4564477205276489, -0.3098198473453522, -0.008158060722053051, -0.3246432840824127, -0.2754644453525543, -0.5880500674247742, 0.2910067141056061, -0.4795628786087036, -0.9759753346443176, 0.8009646534919739, 0.5482512712478638, 0.10784731805324554, 1.153528094291687, 0.6152083277702332, -0.25303971767425537, 0.9847527146339417, 0.08496405184268951, 0.36033895611763, 0.41055190563201904, -0.7046853303909302, 0.08922326564788818, -0.906531810760498, -0.3280640244483948, -0.5878278017044067, -0.460030198097229, -0.743327796459198, -0.13845767080783844, 0.26061001420021057, 0.16513684391975403, -0.6861106753349304, 0.5769447088241577, 
-0.8579556941986084, 0.5435828566551208, 0.5492192506790161, 0.2633947730064392, 0.1685423105955124, -0.14701902866363525, -0.3676724135875702, -0.09833382815122604, -0.4325374364852905, -0.2788601219654083, 1.2471548318862915, 0.2634885311126709, 0.7280102968215942, 0.05778784304857254, 0.9088029861450195, 0.11612796038389206, -0.12267984449863434, -0.5848719477653503, 0.6459653377532959, 0.1090514287352562, -0.8135790228843689, -0.41260862350463867, -0.49986037611961365, -1.1077854633331299, 0.43452173471450806, -0.13488823175430298, -0.8904048204421997, 0.17171061038970947, 0.010861484333872795, -0.16928908228874207, 0.49442264437675476, -0.5455374717712402, 0.8216893672943115, -0.1405998170375824, -0.4912313222885132, 0.08428527414798737, -0.8057164549827576, 0.4649585485458374, 0.20513774454593658, 0.2712844908237457, 0.030881665647029877, 0.2120472937822342, 1.2012863159179688, -0.830998420715332, 0.44243478775024414, 0.0820508524775505, -0.010002481751143932, 0.31775951385498047, -0.1710539013147354, 0.5098081827163696, 0.11161365360021591, -0.011606658808887005, -0.10818731784820557, 0.3081248104572296, -0.884009838104248, -0.06445063650608063, 0.8903590440750122, -0.97604900598526, -0.6215808987617493, -0.901657223701477, -0.5294713973999023, 0.04116775467991829, 0.5391367077827454, 0.3912856876850128, 0.525326669216156, 0.029094276949763298, 0.4054611623287201, 0.8487105369567871, -0.13936524093151093, 0.5767900347709656, 0.23870813846588135, 0.07098430395126343, -0.6584711670875549, 0.8463156819343567, 0.05597902461886406, 0.37019115686416626, 0.2554667294025421, 0.38311290740966797, -0.4899829626083374, -0.19848312437534332, -0.19696927070617676, 0.5218616127967834, -0.6472460031509399, -0.27571797370910645, -0.37651702761650085, -0.3914584815502167, -0.7323013544082642, -0.6147515773773193, -0.35178104043006897, -0.555773138999939, -0.5024814605712891, -0.45902198553085327, 0.6041483879089355, 0.47955822944641113, -0.39945292472839355, 0.019568337127566338, -0.48026371002197266, 0.3044195771217346, 0.33251529932022095, 0.5202391147613525, -0.3813859522342682, -0.5888526439666748, 0.05106348916888237, -0.12963445484638214, -0.5969261527061462, -0.9952026009559631, 0.33637160062789917, -0.04920750856399536, 0.5229184031486511, 0.603020191192627, 0.048465169966220856, 0.8345321416854858, -0.21681927144527435, 1.0686402320861816, 0.33082446455955505, -0.728782057762146, 0.7166728973388672, -0.32150304317474365, 0.13368824124336243, 0.6568789482116699, 0.1476472020149231, -0.18096306920051575, -0.708396315574646, -1.3152217864990234, -0.7980362176895142, 0.6276612877845764, 0.39359229803085327, -0.2734912037849426, 0.035930756479501724, 0.1636352390050888, -0.24137385189533234, -0.1884848028421402, -0.6525864601135254, -0.936565637588501, -0.12715722620487213, -0.46822088956832886, 0.08236297965049744, 0.04092186689376831, -0.39068880677223206, -0.7992874383926392, 0.9178826212882996, 0.00996325258165598, 0.6065687537193298, 0.459035724401474, 0.09756013751029968, 0.0967828631401062, 0.4747141897678375, 0.8963567614555359, 0.7392557859420776, -0.4826599657535553, 0.40924063324928284, 0.4067401885986328, -1.0974631309509277, 0.46962445974349976, 0.3395466208457947, -0.06819376349449158, -0.01475045271217823, 0.46482568979263306, 0.4010632634162903, 0.01811729557812214, -0.19355067610740662, 0.6384425759315491, -0.03698565438389778, -0.5447002053260803, -0.40222224593162537, 0.11305731534957886, -0.1436372995376587, 0.018194975331425667, 0.4103547930717468, -0.1382342278957367, 
0.015884332358837128, -0.4932984709739685, 0.44981294870376587, 0.3951922059059143, -0.45820775628089905, -0.1352589726448059, 0.738854169845581, -0.20066075026988983, -0.14413034915924072, 0.30955591797828674, -0.17776180803775787, -0.6044664978981018, 1.157518982887268, 0.5999515652656555, 0.6951021552085876, -0.23666733503341675, -0.04106209799647331, 0.8902689814567566, 0.3911876976490021, -0.031273532658815384, 0.5226008296012878, 0.31276196241378784, -0.23679904639720917, 0.18743784725666046, -0.8674233555793762, -0.04347090795636177, 0.14522939920425415, -0.8138295412063599, 0.3609582185745239, -0.5051881074905396, -0.22238104045391083, -0.0192703939974308, 0.4547156095504761, -0.4827558398246765, 0.5375224351882935, -0.40395858883857727, 1.2199161052703857, -0.99409019947052, 0.6805406808853149, 0.7094547152519226, -0.523231029510498, -1.002227544784546, -0.5188913941383362, 0.016866235062479973, -0.8178572654724121, 0.5522374510765076, -0.05022629722952843, 0.15009011328220367, -0.1084541603922844, -0.7032806873321533, -0.922386884689331, 1.4052492380142212, -0.025754448026418686, -0.4502822756767273, 0.21236488223075867, -0.054182812571525574, 0.4532003700733185, 0.17662429809570312, 0.6117113828659058, 0.7451843023300171, 0.826446533203125, -0.12814263999462128, -0.7544748187065125, 0.3201928436756134, -0.5160443186759949, -0.34295958280563354, 0.4472409188747406, -0.932226836681366, 1.1832712888717651, 0.008717762306332588, 0.19644637405872345, -0.16521412134170532, 0.6435830593109131, 0.8185544610023499, 0.29062020778656006, 0.3422352969646454, 0.8835360407829285, 0.8840826749801636, -0.44946056604385376, 1.0549933910369873, -0.20409077405929565, 0.8547617793083191, 0.6814862489700317, 0.2822745442390442, 0.7619456648826599, 0.6806924343109131, -0.5815538763999939, 0.5568875074386597, 0.8262491226196289, -0.31085094809532166, 0.39420777559280396, 0.2867293059825897, -0.10345448553562164, -0.15596966445446014, 0.44010549783706665, -0.9106279015541077, 0.11978879570960999, 0.0777292549610138, -0.3216719925403595, 0.08888950198888779, -0.4446659982204437, 0.31022924184799194, -0.09153496474027634, -0.030730538070201874, 0.34217938780784607, 0.054725367575883865, -0.41504064202308655, 0.979752242565155, -0.1534261554479599, 0.7822329998016357, -0.5299236178398132, -0.10483790189027786, -0.38114598393440247, 0.5868879556655884, -0.46585533022880554, -1.0419652462005615, 0.126565620303154, 0.05850044637918472, -0.12863856554031372, -0.1095544844865799, 0.6784493923187256, -0.18483471870422363, -0.797668993473053, 0.16595642268657684, 0.06506755203008652, 0.13088834285736084, 0.5235424637794495, -0.6681274175643921, -0.3669140934944153, -0.06451310962438583, -0.5867612361907959, 0.12208773195743561, 0.3161080479621887, 0.2813166081905365, 0.5553482174873352, 0.6067941188812256, 0.18209852278232574, 0.4343236982822418, -0.5193700194358826, 0.80274897813797, -1.0301487445831299, -0.6967669129371643, -0.8825647830963135, 0.4587565064430237, -0.33690011501312256, -0.8829177618026733, 0.9943551421165466, 1.054551601409912, 0.846775233745575, 0.01456591859459877, 0.6298174262046814, -0.3713502883911133, 0.23175519704818726, -0.40490972995758057, 0.9010502696037292, -0.8704544305801392, -0.20453965663909912, -0.2718287706375122, -0.7663962244987488, -0.36962220072746277, 0.822844386100769, -0.17764544486999512, 0.04112139716744423, 1.0628838539123535, 0.6916847825050354, -0.10592327266931534, -0.018991300836205482, -0.035390954464673996, 0.5704346299171448, 0.38547879457473755, 
0.9908505082130432, 0.6224765181541443, -0.819111704826355, 0.3270776867866516, -0.5120561718940735, -0.45075225830078125, -0.39154085516929626, -0.43635618686676025, -0.8965935111045837, -0.5019729733467102, -0.20504848659038544, -0.6401746273040771, -0.09820198267698288, 0.973578929901123, 0.4037555158138275, -0.8989629745483398, -0.408917635679245, -0.11457108706235886, 0.14382833242416382, -0.5930047631263733, -0.41992101073265076, 0.7484592199325562, -0.13768894970417023, -0.5579295754432678, 0.19497358798980713, -0.14590422809123993, 0.1989801824092865, 0.09076953679323196, -0.4105118215084076, -0.7022808194160461, 0.0488380528986454, 0.4088820219039917, 0.3342965245246887, -0.6933239698410034, -0.705440104007721, 0.28969281911849976, -0.5353623628616333, 0.41781312227249146, -0.046349916607141495, -0.520332932472229, 0.051830679178237915, 0.7133174538612366, 0.44913509488105774, 0.6785884499549866, -0.04217115417122841, 0.08324169367551804, -0.6218639612197876, 0.19086897373199463, -0.007133276201784611, 0.29280179738998413, -0.027245070785284042, -0.34429654479026794, 0.7806693315505981, 0.6529386639595032, -0.5619717240333557, -1.0630642175674438, -0.4275585412979126, -1.4240010976791382, -0.04320867732167244, 1.1595454216003418, 0.05996646732091904, -0.4838084578514099, 0.2285078465938568, -0.16352669894695282, 0.19345705211162567, -0.2991087734699249, 0.7779784798622131, 0.7427462339401245, -0.35158324241638184, 0.14475706219673157, -0.6862615942955017, 0.36615800857543945, 0.4998805820941925, -1.188107967376709, -0.07262036204338074, 0.2475663721561432, 0.33085981011390686, 0.35215356945991516, 0.6425878405570984, -0.1160603016614914, 0.2832282781600952, 0.2827301323413849, 0.05835384503006935, 0.0057755508460104465, 0.05734076723456383, -0.21757392585277557, 0.05032799392938614, -0.24062766134738922, -0.45204970240592957 ]
open-llm-leaderboard/details_timdettmers__guanaco-65b-merged
open-llm-leaderboard
2023-08-27T12:41:13Z
201
0
[ "region:us" ]
null
2023-08-18T18:53:30Z
--- pretty_name: Evaluation run of timdettmers/guanaco-65b-merged dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [timdettmers/guanaco-65b-merged](https://huggingface.co/timdettmers/guanaco-65b-merged)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_timdettmers__guanaco-65b-merged\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-18T00:17:34.582006](https://huggingface.co/datasets/open-llm-leaderboard/details_timdettmers__guanaco-65b-merged/blob/main/results_2023-08-18T00%3A17%3A34.582006.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25104389504062485,\n\ \ \"acc_stderr\": 0.030647487837110618,\n \"acc_norm\": 0.2523346329049775,\n\ \ \"acc_norm_stderr\": 0.030669736900925226,\n \"mc1\": 0.24112607099143207,\n\ \ \"mc1_stderr\": 0.014974827279752346,\n \"mc2\": 0.4840947451540454,\n\ \ \"mc2_stderr\": 0.016324348732205056\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.2030716723549488,\n \"acc_stderr\": 0.011755899303705582,\n\ \ \"acc_norm\": 0.27474402730375425,\n \"acc_norm_stderr\": 0.013044617212771227\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2615016928898626,\n\ \ \"acc_stderr\": 0.004385544487143912,\n \"acc_norm\": 0.26598287193786097,\n\ \ \"acc_norm_stderr\": 0.004409521343140112\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n\ \ \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n\ \ \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\ \ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n\ \ \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n\ \ \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\ \ \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n\ \ \"acc_norm_stderr\": 
0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n\ \ \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \ \ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n\ \ \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"\ acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"\ acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"\ acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n\ \ \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"\ acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n\ \ \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n\ \ \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \ \ \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n\ \ \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"\ acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"\ acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"\ acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8382352941176471,\n \"acc_stderr\": 0.02584501798692692,\n \"\ acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.02584501798692692\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.2742616033755274,\n \"acc_stderr\": 0.029041333510598035,\n \ \ \"acc_norm\": 0.2742616033755274,\n \"acc_norm_stderr\": 0.029041333510598035\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n\ \ \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n\ \ \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\ acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\ \ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n\ \ \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\ \ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\ \ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2948717948717949,\n\ \ 
\"acc_stderr\": 0.029872577708891148,\n \"acc_norm\": 0.2948717948717949,\n\ \ \"acc_norm_stderr\": 0.029872577708891148\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n\ \ \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n\ \ \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n\ \ \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\ \ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\ \ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\ \ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\ \ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n\ \ \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \ \ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.26792698826597133,\n\ \ \"acc_stderr\": 0.011311347690633881,\n \"acc_norm\": 0.26792698826597133,\n\ \ \"acc_norm_stderr\": 0.011311347690633881\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n\ \ \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n\ \ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n\ \ \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n\ \ \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\"\ : {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\ \ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\ \ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n\ \ \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n\ \ \"acc_stderr\": 
0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n\ \ \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.24112607099143207,\n \"mc1_stderr\": 0.014974827279752346,\n\ \ \"mc2\": 0.4840947451540454,\n \"mc2_stderr\": 0.016324348732205056\n\ \ }\n}\n```" repo_url: https://huggingface.co/timdettmers/guanaco-65b-merged leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|arc:challenge|25_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hellaswag|10_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_18T00_17_34.582006 path: - '**/details_harness|truthfulqa:mc|0_2023-08-18T00:17:34.582006.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-18T00:17:34.582006.parquet' - config_name: results data_files: - split: 2023_08_18T00_17_34.582006 path: - results_2023-08-18T00:17:34.582006.parquet - split: latest path: - results_2023-08-18T00:17:34.582006.parquet --- # Dataset Card for Evaluation run of timdettmers/guanaco-65b-merged ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/timdettmers/guanaco-65b-merged - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [timdettmers/guanaco-65b-merged](https://huggingface.co/timdettmers/guanaco-65b-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
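Since the summary above only describes the configuration layout, a minimal sketch of how those 61 configurations could be enumerated before loading anything, assuming `datasets.get_dataset_config_names` resolves the config names declared in this repository's metadata:

```python
from datasets import get_dataset_config_names

# Enumerate the evaluation configs declared for this repository
# (one per benchmark task, plus the aggregated "results" config).
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_timdettmers__guanaco-65b-merged"
)
print(len(configs))   # the summary above says 61 configurations
print(configs[:5])    # a few of the harness_* task configs
```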
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_timdettmers__guanaco-65b-merged", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-18T00:17:34.582006](https://huggingface.co/datasets/open-llm-leaderboard/details_timdettmers__guanaco-65b-merged/blob/main/results_2023-08-18T00%3A17%3A34.582006.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25104389504062485, "acc_stderr": 0.030647487837110618, "acc_norm": 0.2523346329049775, "acc_norm_stderr": 0.030669736900925226, "mc1": 0.24112607099143207, "mc1_stderr": 0.014974827279752346, "mc2": 0.4840947451540454, "mc2_stderr": 0.016324348732205056 }, "harness|arc:challenge|25": { "acc": 0.2030716723549488, "acc_stderr": 0.011755899303705582, "acc_norm": 0.27474402730375425, "acc_norm_stderr": 0.013044617212771227 }, "harness|hellaswag|10": { "acc": 0.2615016928898626, "acc_stderr": 0.004385544487143912, "acc_norm": 0.26598287193786097, "acc_norm_stderr": 0.004409521343140112 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.032876667586034906, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.032876667586034906 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.02584501798692692, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.02584501798692692 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598035, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598035 }, "harness|hendrycksTest-human_aging|5": { "acc": 
0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2948717948717949, "acc_stderr": 0.029872577708891148, "acc_norm": 0.2948717948717949, "acc_norm_stderr": 0.029872577708891148 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.26792698826597133, "acc_stderr": 0.011311347690633881, "acc_norm": 0.26792698826597133, "acc_norm_stderr": 0.011311347690633881 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.24112607099143207, "mc1_stderr": 0.014974827279752346, "mc2": 0.4840947451540454, "mc2_stderr": 0.016324348732205056 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_yeontaek__llama-2-70b-IA3-guanaco
open-llm-leaderboard
2023-10-23T01:35:16Z
201
0
[ "region:us" ]
null
2023-08-18T18:54:12Z
--- pretty_name: Evaluation run of yeontaek/llama-2-70b-IA3-guanaco dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yeontaek/llama-2-70b-IA3-guanaco](https://huggingface.co/yeontaek/llama-2-70b-IA3-guanaco)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yeontaek__llama-2-70b-IA3-guanaco\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-23T01:35:02.299684](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-70b-IA3-guanaco/blob/main/results_2023-10-23T01-35-02.299684.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.059354026845637585,\n\ \ \"em_stderr\": 0.0024197909382591906,\n \"f1\": 0.12265834731543575,\n\ \ \"f1_stderr\": 0.0026243794222964158,\n \"acc\": 0.5548770235038503,\n\ \ \"acc_stderr\": 0.011602676960733152\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.059354026845637585,\n \"em_stderr\": 0.0024197909382591906,\n\ \ \"f1\": 0.12265834731543575,\n \"f1_stderr\": 0.0026243794222964158\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.287338893100834,\n \ \ \"acc_stderr\": 0.012464677060107086\n },\n \"harness|winogrande|5\":\ \ {\n \"acc\": 0.8224151539068666,\n \"acc_stderr\": 0.01074067686135922\n\ \ }\n}\n```" repo_url: https://huggingface.co/yeontaek/llama-2-70b-IA3-guanaco leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|arc:challenge|25_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-18T03:44:14.521953.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_23T01_35_02.299684 path: - '**/details_harness|drop|3_2023-10-23T01-35-02.299684.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-23T01-35-02.299684.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_23T01_35_02.299684 path: - '**/details_harness|gsm8k|5_2023-10-23T01-35-02.299684.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-23T01-35-02.299684.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hellaswag|10_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T03:44:14.521953.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T03:44:14.521953.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-18T03:44:14.521953.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T03:44:14.521953.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_18T03_44_14.521953 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-management|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-18T03:44:14.521953.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T03:44:14.521953.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_18T03_44_14.521953 path: - '**/details_harness|truthfulqa:mc|0_2023-08-18T03:44:14.521953.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-18T03:44:14.521953.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_23T01_35_02.299684 path: - '**/details_harness|winogrande|5_2023-10-23T01-35-02.299684.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-23T01-35-02.299684.parquet' - config_name: results data_files: - split: 2023_08_18T03_44_14.521953 path: - results_2023-08-18T03:44:14.521953.parquet - split: 2023_10_23T01_35_02.299684 path: - results_2023-10-23T01-35-02.299684.parquet - split: latest path: - results_2023-10-23T01-35-02.299684.parquet --- # Dataset Card for Evaluation run of yeontaek/llama-2-70b-IA3-guanaco ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yeontaek/llama-2-70b-IA3-guanaco - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [yeontaek/llama-2-70b-IA3-guanaco](https://huggingface.co/yeontaek/llama-2-70b-IA3-guanaco) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yeontaek__llama-2-70b-IA3-guanaco", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T01:35:02.299684](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-70b-IA3-guanaco/blob/main/results_2023-10-23T01-35-02.299684.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.059354026845637585, "em_stderr": 0.0024197909382591906, "f1": 0.12265834731543575, "f1_stderr": 0.0026243794222964158, "acc": 0.5548770235038503, "acc_stderr": 0.011602676960733152 }, "harness|drop|3": { "em": 0.059354026845637585, "em_stderr": 0.0024197909382591906, "f1": 0.12265834731543575, "f1_stderr": 0.0026243794222964158 }, "harness|gsm8k|5": { "acc": 0.287338893100834, "acc_stderr": 0.012464677060107086 }, "harness|winogrande|5": { "acc": 0.8224151539068666, "acc_stderr": 0.01074067686135922 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
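As a usage note complementing the snippet in the summary above: the per-task configs and the aggregated `results` config can be read with the same `datasets` API. The sketch below is illustrative only, assuming the config names and the `latest` split convention documented in this card:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_yeontaek__llama-2-70b-IA3-guanaco"

# Enumerate the per-task configurations (the card lists 64 of them).
configs = get_dataset_config_names(REPO)
print(len(configs), "configs, e.g.", configs[:3])

# The "results" config aggregates the scores; "latest" points at the most recent run.
results = load_dataset(REPO, "results", split="latest")
print(results[0])

# A single eval from the latest run, e.g. the 5-shot Winogrande details.
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")
print(winogrande)
```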
datasetId: open-llm-leaderboard/details_h2oai__h2ogpt-research-oasst1-llama-65b
author: open-llm-leaderboard
last_modified: 2023-08-27T12:41:44Z
downloads: 201
likes: 0
tags: [ "region:us" ]
task_categories: null
createdAt: 2023-08-18T18:56:43Z
--- pretty_name: Evaluation run of h2oai/h2ogpt-research-oasst1-llama-65b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [h2oai/h2ogpt-research-oasst1-llama-65b](https://huggingface.co/h2oai/h2ogpt-research-oasst1-llama-65b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_h2oai__h2ogpt-research-oasst1-llama-65b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-17T22:10:29.981773](https://huggingface.co/datasets/open-llm-leaderboard/details_h2oai__h2ogpt-research-oasst1-llama-65b/blob/main/results_2023-08-17T22%3A10%3A29.981773.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6359037673839993,\n\ \ \"acc_stderr\": 0.0329346816196445,\n \"acc_norm\": 0.6396809356138717,\n\ \ \"acc_norm_stderr\": 0.03290965482744071,\n \"mc1\": 0.34394124847001223,\n\ \ \"mc1_stderr\": 0.01662908751427678,\n \"mc2\": 0.48845185520886875,\n\ \ \"mc2_stderr\": 0.014057830912491135\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6177474402730375,\n \"acc_stderr\": 0.014200454049979275,\n\ \ \"acc_norm\": 0.6476109215017065,\n \"acc_norm_stderr\": 0.01396014260059868\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6664011153156741,\n\ \ \"acc_stderr\": 0.004705347137699622,\n \"acc_norm\": 0.8593905596494722,\n\ \ \"acc_norm_stderr\": 0.0034690778470563765\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n\ \ \"acc_stderr\": 0.042849586397534015,\n \"acc_norm\": 0.562962962962963,\n\ \ \"acc_norm_stderr\": 0.042849586397534015\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03523807393012047,\n \ \ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03523807393012047\n \ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n\ \ \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \ \ \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6339622641509434,\n \"acc_stderr\": 0.029647813539365245,\n\ \ \"acc_norm\": 0.6339622641509434,\n \"acc_norm_stderr\": 0.029647813539365245\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7083333333333334,\n\ \ \"acc_stderr\": 0.03800968060554858,\n \"acc_norm\": 0.7083333333333334,\n\ \ \"acc_norm_stderr\": 
0.03800968060554858\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \ \ \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n\ \ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5549132947976878,\n\ \ \"acc_stderr\": 0.03789401760283648,\n \"acc_norm\": 0.5549132947976878,\n\ \ \"acc_norm_stderr\": 0.03789401760283648\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n\ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n\ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.6,\n \"acc_stderr\": 0.03202563076101737,\n \ \ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.03202563076101737\n },\n\ \ \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.38596491228070173,\n\ \ \"acc_stderr\": 0.04579639422070434,\n \"acc_norm\": 0.38596491228070173,\n\ \ \"acc_norm_stderr\": 0.04579639422070434\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n\ \ \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3783068783068783,\n \"acc_stderr\": 0.024976954053155254,\n \"\ acc_norm\": 0.3783068783068783,\n \"acc_norm_stderr\": 0.024976954053155254\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n\ \ \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n\ \ \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\ \ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7483870967741936,\n\ \ \"acc_stderr\": 0.024685979286239963,\n \"acc_norm\": 0.7483870967741936,\n\ \ \"acc_norm_stderr\": 0.024685979286239963\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4039408866995074,\n \"acc_stderr\": 0.0345245390382204,\n\ \ \"acc_norm\": 0.4039408866995074,\n \"acc_norm_stderr\": 0.0345245390382204\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526066,\n \"acc_norm\"\ : 0.67,\n \"acc_norm_stderr\": 0.047258156262526066\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8,\n \"acc_stderr\": 0.031234752377721164,\n \ \ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.031234752377721164\n \ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.803030303030303,\n \"acc_stderr\": 0.028335609732463355,\n \"\ acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463355\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 
0.8756476683937824,\n \"acc_stderr\": 0.023814477086593542,\n\ \ \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.023814477086593542\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6256410256410256,\n \"acc_stderr\": 0.024537591572830513,\n\ \ \"acc_norm\": 0.6256410256410256,\n \"acc_norm_stderr\": 0.024537591572830513\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \ \ \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.030489911417673227,\n\ \ \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.030489911417673227\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3973509933774834,\n \"acc_stderr\": 0.0399552400768168,\n \"acc_norm\"\ : 0.3973509933774834,\n \"acc_norm_stderr\": 0.0399552400768168\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8275229357798165,\n\ \ \"acc_stderr\": 0.016197807956848043,\n \"acc_norm\": 0.8275229357798165,\n\ \ \"acc_norm_stderr\": 0.016197807956848043\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.03372343271653062,\n\ \ \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.03372343271653062\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931055,\n \"\ acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931055\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8396624472573839,\n \"acc_stderr\": 0.02388438092596567,\n \ \ \"acc_norm\": 0.8396624472573839,\n \"acc_norm_stderr\": 0.02388438092596567\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n\ \ \"acc_stderr\": 0.030769352008229146,\n \"acc_norm\": 0.6995515695067265,\n\ \ \"acc_norm_stderr\": 0.030769352008229146\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"\ acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n\ \ \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n\ \ \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742179,\n\ \ \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742179\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n\ \ \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.5178571428571429,\n\ \ \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.03586594738573974,\n\ \ \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.03586594738573974\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n\ \ \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n\ \ 
\"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8148148148148148,\n\ \ \"acc_stderr\": 0.013890862162876166,\n \"acc_norm\": 0.8148148148148148,\n\ \ \"acc_norm_stderr\": 0.013890862162876166\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.02418242749657761,\n\ \ \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.02418242749657761\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4770949720670391,\n\ \ \"acc_stderr\": 0.016704945740326188,\n \"acc_norm\": 0.4770949720670391,\n\ \ \"acc_norm_stderr\": 0.016704945740326188\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.026787453111906497,\n\ \ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.026787453111906497\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7459807073954984,\n\ \ \"acc_stderr\": 0.0247238615047717,\n \"acc_norm\": 0.7459807073954984,\n\ \ \"acc_norm_stderr\": 0.0247238615047717\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904212,\n\ \ \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904212\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \ \ \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4830508474576271,\n\ \ \"acc_stderr\": 0.01276289688921086,\n \"acc_norm\": 0.4830508474576271,\n\ \ \"acc_norm_stderr\": 0.01276289688921086\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.6066176470588235,\n \"acc_stderr\": 0.029674288281311155,\n\ \ \"acc_norm\": 0.6066176470588235,\n \"acc_norm_stderr\": 0.029674288281311155\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.673202614379085,\n \"acc_stderr\": 0.018975427920507205,\n \ \ \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.018975427920507205\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n\ \ \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.7272727272727273,\n\ \ \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6653061224489796,\n \"acc_stderr\": 0.030209235226242304,\n\ \ \"acc_norm\": 0.6653061224489796,\n \"acc_norm_stderr\": 0.030209235226242304\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n\ \ \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n\ \ \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896308,\n \ \ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896308\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n\ \ \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n\ \ \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n\ \ \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 
0.029170885500727665\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.34394124847001223,\n\ \ \"mc1_stderr\": 0.01662908751427678,\n \"mc2\": 0.48845185520886875,\n\ \ \"mc2_stderr\": 0.014057830912491135\n }\n}\n```" repo_url: https://huggingface.co/h2oai/h2ogpt-research-oasst1-llama-65b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|arc:challenge|25_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|arc:challenge|25_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hellaswag|10_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hellaswag|10_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:53:50.635044.parquet' - 
'**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T17:53:50.635044.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:10:29.981773.parquet' - 
'**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:10:29.981773.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:10:29.981773.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T22:10:29.981773.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: 
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - 
'**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:10:29.981773.parquet' - config_name: 
harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:53:50.635044.parquet' - split: 
2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 
path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T17:53:50.635044.parquet' 
- split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:10:29.981773.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_17T17_53_50.635044 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T17:53:50.635044.parquet' - split: 2023_08_17T22_10_29.981773 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T22:10:29.981773.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T22:10:29.981773.parquet' - config_name: results data_files: - split: 2023_08_17T17_53_50.635044 path: - results_2023-08-17T17:53:50.635044.parquet - split: 2023_08_17T22_10_29.981773 path: - results_2023-08-17T22:10:29.981773.parquet - split: latest path: - results_2023-08-17T22:10:29.981773.parquet --- # Dataset Card for Evaluation run of h2oai/h2ogpt-research-oasst1-llama-65b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/h2oai/h2ogpt-research-oasst1-llama-65b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [h2oai/h2ogpt-research-oasst1-llama-65b](https://huggingface.co/h2oai/h2ogpt-research-oasst1-llama-65b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_h2oai__h2ogpt-research-oasst1-llama-65b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T22:10:29.981773](https://huggingface.co/datasets/open-llm-leaderboard/details_h2oai__h2ogpt-research-oasst1-llama-65b/blob/main/results_2023-08-17T22%3A10%3A29.981773.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6359037673839993, "acc_stderr": 0.0329346816196445, "acc_norm": 0.6396809356138717, "acc_norm_stderr": 0.03290965482744071, "mc1": 0.34394124847001223, "mc1_stderr": 0.01662908751427678, "mc2": 0.48845185520886875, "mc2_stderr": 0.014057830912491135 }, "harness|arc:challenge|25": { "acc": 0.6177474402730375, "acc_stderr": 0.014200454049979275, "acc_norm": 0.6476109215017065, "acc_norm_stderr": 0.01396014260059868 }, "harness|hellaswag|10": { "acc": 0.6664011153156741, "acc_stderr": 0.004705347137699622, "acc_norm": 0.8593905596494722, "acc_norm_stderr": 0.0034690778470563765 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.042849586397534015, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.042849586397534015 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.75, "acc_stderr": 0.03523807393012047, "acc_norm": 0.75, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6339622641509434, "acc_stderr": 0.029647813539365245, "acc_norm": 0.6339622641509434, "acc_norm_stderr": 0.029647813539365245 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7083333333333334, "acc_stderr": 0.03800968060554858, "acc_norm": 0.7083333333333334, "acc_norm_stderr": 0.03800968060554858 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5549132947976878, "acc_stderr": 0.03789401760283648, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.03789401760283648 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201942, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201942 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6, "acc_stderr": 0.03202563076101737, "acc_norm": 0.6, "acc_norm_stderr": 0.03202563076101737 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.38596491228070173, "acc_stderr": 0.04579639422070434, "acc_norm": 0.38596491228070173, "acc_norm_stderr": 0.04579639422070434 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3783068783068783, "acc_stderr": 0.024976954053155254, "acc_norm": 0.3783068783068783, "acc_norm_stderr": 0.024976954053155254 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 
0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7483870967741936, "acc_stderr": 0.024685979286239963, "acc_norm": 0.7483870967741936, "acc_norm_stderr": 0.024685979286239963 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.0345245390382204, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.0345245390382204 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.047258156262526066, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526066 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8, "acc_stderr": 0.031234752377721164, "acc_norm": 0.8, "acc_norm_stderr": 0.031234752377721164 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463355, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463355 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8756476683937824, "acc_stderr": 0.023814477086593542, "acc_norm": 0.8756476683937824, "acc_norm_stderr": 0.023814477086593542 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6256410256410256, "acc_stderr": 0.024537591572830513, "acc_norm": 0.6256410256410256, "acc_norm_stderr": 0.024537591572830513 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.030489911417673227, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.030489911417673227 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3973509933774834, "acc_stderr": 0.0399552400768168, "acc_norm": 0.3973509933774834, "acc_norm_stderr": 0.0399552400768168 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8275229357798165, "acc_stderr": 0.016197807956848043, "acc_norm": 0.8275229357798165, "acc_norm_stderr": 0.016197807956848043 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5740740740740741, "acc_stderr": 0.03372343271653062, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.03372343271653062 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.026156867523931055, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.026156867523931055 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8396624472573839, "acc_stderr": 0.02388438092596567, "acc_norm": 0.8396624472573839, "acc_norm_stderr": 0.02388438092596567 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229146, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229146 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7404580152671756, "acc_stderr": 0.03844876139785271, "acc_norm": 0.7404580152671756, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, 
"harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742179, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742179 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.047427623612430116, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.03586594738573974, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.03586594738573974 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8148148148148148, "acc_stderr": 0.013890862162876166, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.013890862162876166 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7196531791907514, "acc_stderr": 0.02418242749657761, "acc_norm": 0.7196531791907514, "acc_norm_stderr": 0.02418242749657761 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4770949720670391, "acc_stderr": 0.016704945740326188, "acc_norm": 0.4770949720670391, "acc_norm_stderr": 0.016704945740326188 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6764705882352942, "acc_stderr": 0.026787453111906497, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.026787453111906497 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7459807073954984, "acc_stderr": 0.0247238615047717, "acc_norm": 0.7459807073954984, "acc_norm_stderr": 0.0247238615047717 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, "acc_stderr": 0.02399350170904212, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.02399350170904212 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4830508474576271, "acc_stderr": 0.01276289688921086, "acc_norm": 0.4830508474576271, "acc_norm_stderr": 0.01276289688921086 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6066176470588235, "acc_stderr": 0.029674288281311155, "acc_norm": 0.6066176470588235, "acc_norm_stderr": 0.029674288281311155 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.673202614379085, "acc_stderr": 0.018975427920507205, "acc_norm": 0.673202614379085, "acc_norm_stderr": 0.018975427920507205 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940589, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940589 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6653061224489796, "acc_stderr": 0.030209235226242304, "acc_norm": 0.6653061224489796, "acc_norm_stderr": 0.030209235226242304 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.03379976689896308, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896308 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 
0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.34394124847001223, "mc1_stderr": 0.01662908751427678, "mc2": 0.48845185520886875, "mc2_stderr": 0.014057830912491135 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
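To complement the loading example in the card above, here is a minimal sketch (assuming only that the `datasets` library is installed) that pulls the aggregated "results" configuration and a single timestamped split, using the config and split names listed in this card's YAML front matter:

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_h2oai__h2ogpt-research-oasst1-llama-65b"

# Aggregated metrics: the "results" config, with "latest" pointing at the most recent run.
results = load_dataset(REPO, "results", split="latest")

# Per-example details for one task from a specific run, addressed by its timestamped split.
truthfulqa_first_run = load_dataset(
    REPO,
    "harness_truthfulqa_mc_0",
    split="2023_08_17T17_53_50.635044",
)
```

Timestamped splits keep the raw records of each individual run, while the "latest" split always mirrors the most recent one, as described in the Dataset Summary above.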
[ -0.6855427026748657, -0.8281185626983643, 0.2745996415615082, 0.20443959534168243, -0.15031349658966064, -0.037143293768167496, 0.023485232144594193, -0.24038171768188477, 0.553355872631073, -0.08831003308296204, -0.467659592628479, -0.6591634750366211, -0.4594847857952118, 0.21651887893676758, 0.022444315254688263, 0.8125171065330505, -0.20845144987106323, -0.12943656742572784, 0.07061231881380081, -0.03938882797956467, -0.248819500207901, -0.339647501707077, -0.5116793513298035, -0.3561283051967621, 0.1798577606678009, 0.44346293807029724, 0.42100954055786133, 0.8025307655334473, 0.7074468731880188, 0.2708432674407959, -0.3064281642436981, -0.03460661694407463, -0.19813430309295654, -0.30564361810684204, 0.39156922698020935, -0.32145482301712036, -0.8599485158920288, 0.3027782142162323, 0.7569053769111633, 0.6403002142906189, -0.07503972947597504, 0.3362131416797638, 0.06556937098503113, 0.5864645838737488, -0.38899266719818115, 0.04704511538147926, -0.264353483915329, 0.21202033758163452, -0.16662628948688507, -0.2856140434741974, -0.30632126331329346, -0.2159550040960312, -0.08462033420801163, -0.8527272343635559, 0.23660655319690704, 0.35442253947257996, 1.5653647184371948, -0.10868065804243088, -0.26557204127311707, 0.09696675091981888, -0.09846311062574387, 1.0212191343307495, -0.9051100611686707, 0.33346667885780334, 0.8000204563140869, 0.09865959733724594, -0.17801299691200256, -0.5483854413032532, -0.6203996539115906, 0.0726127102971077, -0.3985497057437897, 0.3529810607433319, -0.06386390328407288, -0.197068452835083, 0.3470921218395233, 0.6443949341773987, -0.703049898147583, 0.18357202410697937, -0.6594078540802002, -0.17748978734016418, 1.0724763870239258, 0.32739681005477905, 0.03102065809071064, -0.3654434382915497, -0.712740421295166, -0.6289295554161072, -0.40545299649238586, 0.27977949380874634, 0.42974555492401123, 0.33000102639198303, -0.42540305852890015, 0.724989116191864, -0.4186973571777344, 0.5411099791526794, 0.39394405484199524, -0.03246307373046875, 0.9034308195114136, -0.673832893371582, -0.5057296752929688, -0.03209378197789192, 1.1111341714859009, 0.5526066422462463, 0.029910052195191383, 0.2472967654466629, 0.04287281259894371, -0.12887291610240936, 0.016329962760210037, -0.8909111022949219, -0.2886657416820526, 0.18130303919315338, -0.41697075963020325, -0.47838571667671204, 0.3571981191635132, -0.9163532257080078, 0.1331552416086197, -0.027763202786445618, 0.43053489923477173, -0.49051743745803833, -0.16635462641716003, 0.23838771879673004, -0.4468744397163391, 0.8115522861480713, -0.1675255447626114, -0.7353114485740662, 0.39678242802619934, 0.5410149693489075, 0.784298837184906, -0.1211325153708458, -0.4265548884868622, -0.1081470176577568, -0.11070580780506134, -0.23487190902233124, 0.508064866065979, -0.2861799895763397, -0.41805848479270935, -0.29265543818473816, 0.2718096375465393, -0.25166159868240356, -0.3245927691459656, 0.7062347531318665, -0.19357989728450775, 0.2413794994354248, -0.4427706003189087, -0.6489461660385132, 0.13774316012859344, 0.38505008816719055, -0.3925805985927582, 1.3385791778564453, 0.2665221691131592, -0.8289971351623535, 0.4047022759914398, -0.6179430484771729, -0.18655851483345032, -0.06011517718434334, -0.04696295037865639, -0.8087918758392334, -0.2833195626735687, 0.14831335842609406, 0.3558168113231659, -0.162660151720047, -0.15127523243427277, -0.3807903826236725, -0.31645241379737854, 0.2959304749965668, -0.1566019356250763, 1.2287077903747559, -0.03613342344760895, -0.774085283279419, -0.13111703097820282, 
-1.2351021766662598, 0.315789133310318, 0.23997272551059723, -0.4238842725753784, -0.1850186139345169, -0.4511563777923584, -0.027556905522942543, 0.17808035016059875, 0.2871762216091156, -0.8291756510734558, 0.27131298184394836, -0.32549166679382324, 0.21293747425079346, 1.2470638751983643, 0.023602962493896484, 0.14857177436351776, -0.5417450070381165, 0.5137143731117249, 0.17939290404319763, 0.19191579520702362, 0.38855692744255066, -0.6595947742462158, -0.7770178318023682, -0.48936787247657776, -0.02710920386016369, 0.6314653158187866, -0.19835034012794495, 1.1592605113983154, 0.03324649855494499, -0.8925891518592834, -0.45649904012680054, -0.1399817019701004, 0.5251403450965881, 0.8404254913330078, 0.6075019240379333, -0.0717896893620491, -0.6065444350242615, -1.105739951133728, -0.277496874332428, -0.22804036736488342, 0.11015748977661133, 0.259103000164032, 1.0789235830307007, -0.2316112518310547, 0.6132488250732422, -1.0733213424682617, -0.20499449968338013, 0.1836307793855667, -0.0384562723338604, 0.7595784664154053, 0.761616587638855, 0.5785502195358276, -0.6497098803520203, -0.5392917394638062, 0.15406043827533722, -0.8699372410774231, -0.07671178132295609, 0.15880337357521057, -0.34996935725212097, 0.1394404023885727, 0.11949893832206726, -0.7155854105949402, 0.5770577788352966, 0.22845524549484253, -1.099782943725586, 1.0621925592422485, -0.3119010627269745, 0.5852129459381104, -1.0019676685333252, 0.23362797498703003, -0.04896952211856842, 0.060419462621212006, -0.4858417212963104, 0.019760627299547195, 0.07177326083183289, 0.4496896266937256, -0.5000558495521545, 0.8237226009368896, -0.6883192658424377, -0.05439998209476471, 0.4445819854736328, 0.09782589972019196, -0.09853009879589081, 0.39393216371536255, -0.22355268895626068, 0.8008732795715332, 0.7673822045326233, -0.45507481694221497, 0.4860134422779083, 0.4210508167743683, -0.2239706665277481, 0.7068411707878113, -0.5247300267219543, -0.28960883617401123, 0.28392353653907776, -0.037665560841560364, -0.8932939767837524, -0.4977216124534607, 0.06585561484098434, -0.6200625896453857, -0.08032085746526718, 0.34959161281585693, -0.2709072232246399, -0.8060200214385986, -0.9284083843231201, 0.3983922302722931, 0.7659680843353271, -0.41818511486053467, -0.1756579875946045, 0.08909744024276733, 0.08747777342796326, -0.81151282787323, -0.8158098459243774, -0.5212932229042053, -0.19764575362205505, -0.7022653818130493, 0.3291090428829193, -0.24195952713489532, -0.2636306881904602, -0.04249739274382591, -0.2564135789871216, -0.3401544392108917, 0.003463990753516555, 0.15651842951774597, 0.6471942663192749, -0.44566240906715393, -0.2829374372959137, -0.23848924040794373, -0.14444732666015625, 0.2295888513326645, -0.12042181938886642, 0.40923255681991577, -0.4405643045902252, -0.4021373391151428, -0.4581371247768402, 0.004871587734669447, 0.6874634623527527, -0.08871182799339294, 0.7339990735054016, 0.4458572268486023, -0.32668182253837585, 0.045487262308597565, -0.2958924174308777, -0.2792315185070038, -0.5725813508033752, 0.26476314663887024, -0.47331714630126953, -1.0366257429122925, 0.7457048892974854, 0.5189903378486633, 0.05137403681874275, 1.152199387550354, 0.5894113183021545, -0.3216334581375122, 1.0005154609680176, 0.03068145364522934, 0.3090740740299225, 0.38656726479530334, -0.679857075214386, 0.10613944381475449, -0.9531874060630798, -0.3166359066963196, -0.6106058359146118, -0.503472626209259, -0.7183063626289368, -0.09789269417524338, 0.3178405463695526, 0.13522134721279144, -0.6944003105163574, 0.5862946510314941, 
-0.81763756275177, 0.5611932277679443, 0.5921264886856079, 0.29269856214523315, 0.1354953944683075, -0.10831963270902634, -0.3674218952655792, -0.07642075419425964, -0.4611562490463257, -0.25535452365875244, 1.2446223497390747, 0.29913389682769775, 0.6963106989860535, 0.09886772930622101, 0.8699350357055664, 0.12387939542531967, -0.09684114158153534, -0.5971620678901672, 0.621902346611023, 0.14574888348579407, -0.8205545544624329, -0.46880435943603516, -0.5135483145713806, -1.1155142784118652, 0.4036910831928253, -0.1282978653907776, -0.8808704018592834, 0.14151370525360107, 0.05935882776975632, -0.20276524126529694, 0.49568817019462585, -0.5326436161994934, 0.8168989419937134, -0.13769184052944183, -0.4988361895084381, 0.06020105257630348, -0.8536064028739929, 0.42346033453941345, 0.23960453271865845, 0.23261423408985138, 0.023276709020137787, 0.2278137356042862, 1.1967320442199707, -0.8280333280563354, 0.4276735782623291, 0.05828336626291275, 0.05054652690887451, 0.366444855928421, -0.20037484169006348, 0.5008417963981628, 0.07078274339437485, -0.05248149484395981, -0.07829739898443222, 0.24336282908916473, -0.9085482954978943, -0.02542235516011715, 0.8928015232086182, -0.9506564140319824, -0.6082568764686584, -0.9007554650306702, -0.48036473989486694, 0.09601985663175583, 0.5723464488983154, 0.3646514117717743, 0.5170132517814636, 0.0013950146967545152, 0.4609796702861786, 0.8133783340454102, -0.11109250783920288, 0.5988604426383972, 0.22884805500507355, 0.12706683576107025, -0.6748427748680115, 0.8364458084106445, 0.06542004644870758, 0.385611355304718, 0.2789672613143921, 0.40623798966407776, -0.5363373160362244, -0.19093218445777893, -0.2367764413356781, 0.5267748236656189, -0.6072350144386292, -0.23392486572265625, -0.371127188205719, -0.3530540466308594, -0.7729228138923645, -0.5791130661964417, -0.30520790815353394, -0.5254054665565491, -0.4825858771800995, -0.46182167530059814, 0.5529289245605469, 0.4586159288883209, -0.36866047978401184, 0.054969049990177155, -0.47059720754623413, 0.2860187590122223, 0.3335420787334442, 0.5557649731636047, -0.3720603883266449, -0.5806697607040405, 0.061166588217020035, -0.11225653439760208, -0.5832816958427429, -0.9632201790809631, 0.31657031178474426, -0.006486102938652039, 0.5167457461357117, 0.5672959685325623, 0.05912988260388374, 0.8508383631706238, -0.21026715636253357, 1.0764484405517578, 0.34706243872642517, -0.7732844948768616, 0.7437419891357422, -0.3143044114112854, 0.1734326034784317, 0.6690129041671753, 0.2101658135652542, -0.17150697112083435, -0.6198392510414124, -1.2645436525344849, -0.8251497745513916, 0.6592186093330383, 0.41890597343444824, -0.3117704391479492, 0.08680035173892975, 0.16633887588977814, -0.2641128599643707, -0.1430298089981079, -0.6416855454444885, -0.8649364113807678, -0.14415669441223145, -0.4778237044811249, 0.10838799178600311, 0.015360589139163494, -0.39490827918052673, -0.8356820344924927, 0.9656155705451965, -0.010316782630980015, 0.6244950890541077, 0.502788782119751, 0.061163920909166336, 0.06304577738046646, 0.47216883301734924, 0.9125664234161377, 0.745541512966156, -0.47078338265419006, 0.41800010204315186, 0.36260056495666504, -1.0532052516937256, 0.4683898091316223, 0.2655584514141083, -0.08372938632965088, -0.04221272096037865, 0.49780595302581787, 0.46980321407318115, -0.0013521882938221097, -0.24077299237251282, 0.6231129765510559, 0.006941102910786867, -0.5770435929298401, -0.40259334444999695, 0.08490296453237534, -0.15231239795684814, -0.05384271964430809, 0.4019506275653839, 
-0.1394300013780594, -0.01997196301817894, -0.5272489190101624, 0.4515465199947357, 0.36559465527534485, -0.48970040678977966, -0.16521044075489044, 0.7154830694198608, -0.1606469452381134, -0.159586563706398, 0.37250691652297974, -0.21222135424613953, -0.6204819679260254, 1.1520518064498901, 0.5775682926177979, 0.6950914263725281, -0.23901118338108063, -0.07258404046297073, 0.9430016279220581, 0.3937876224517822, -0.0561627596616745, 0.540986955165863, 0.3122926652431488, -0.23305481672286987, 0.2058841586112976, -0.8492103219032288, -0.043245889246463776, 0.17298640310764313, -0.8664376139640808, 0.3140685558319092, -0.49752575159072876, -0.2213672250509262, 0.01974416896700859, 0.3774910271167755, -0.48705363273620605, 0.5231900811195374, -0.41184201836586, 1.2123788595199585, -0.9755120277404785, 0.6830642819404602, 0.7713262438774109, -0.5081506371498108, -1.0066944360733032, -0.48980775475502014, 0.031359609216451645, -0.8059382438659668, 0.5318467020988464, -0.033209241926670074, 0.20229996740818024, -0.08246459066867828, -0.7082616090774536, -0.9682283997535706, 1.4283397197723389, -0.07367916405200958, -0.43382373452186584, 0.2173210233449936, -0.010026831179857254, 0.4550350606441498, 0.12940536439418793, 0.5975228548049927, 0.7798181772232056, 0.8476505875587463, -0.08363336324691772, -0.7987125515937805, 0.3554493486881256, -0.5282084941864014, -0.3619387745857239, 0.46348506212234497, -0.9259364604949951, 1.20591139793396, -0.0467437244951725, 0.18744224309921265, -0.17464138567447662, 0.6166489720344543, 0.7888422608375549, 0.33226361870765686, 0.3692393898963928, 0.9143672585487366, 0.8281290531158447, -0.5049037337303162, 1.0138401985168457, -0.20915348827838898, 0.863491415977478, 0.7375268936157227, 0.20851916074752808, 0.8198813796043396, 0.7071741223335266, -0.5672690868377686, 0.5517739653587341, 0.7996134161949158, -0.3187588155269623, 0.40218299627304077, 0.2872928977012634, -0.0994734987616539, -0.1352626234292984, 0.39304670691490173, -0.8858881592750549, 0.14433330297470093, 0.06413476914167404, -0.34692397713661194, 0.09328758716583252, -0.47223201394081116, 0.32462555170059204, -0.12264986336231232, -0.014364143833518028, 0.39961856603622437, 0.03403134644031525, -0.47996655106544495, 0.9483477473258972, -0.16718241572380066, 0.742294430732727, -0.4996558427810669, -0.09395121037960052, -0.3302909731864929, 0.571723997592926, -0.4383118748664856, -1.0577099323272705, 0.13342326879501343, 0.07486192137002945, -0.13707798719406128, -0.14621207118034363, 0.6731740236282349, -0.19462347030639648, -0.7680389285087585, 0.1418432742357254, 0.07346509397029877, 0.11167720705270767, 0.4921132028102875, -0.6693438291549683, -0.3488629162311554, -0.06360040605068207, -0.5915956497192383, 0.129276841878891, 0.26970338821411133, 0.2975196838378906, 0.5463393330574036, 0.6237757802009583, 0.14126883447170258, 0.412558376789093, -0.575663149356842, 0.7733719944953918, -1.0194149017333984, -0.7201473712921143, -0.9260630011558533, 0.5166133642196655, -0.35000479221343994, -0.905965268611908, 0.9648929238319397, 1.0650222301483154, 0.8880065083503723, -0.01729930192232132, 0.6769883036613464, -0.42253372073173523, 0.257933109998703, -0.3820870816707611, 0.9362601637840271, -0.8727743029594421, -0.24266454577445984, -0.2154812216758728, -0.6725607514381409, -0.3372163772583008, 0.8099046349525452, -0.15314564108848572, 0.016514260321855545, 1.07789146900177, 0.6726670265197754, -0.10655239224433899, 0.016027919948101044, -0.10907850414514542, 0.579895555973053, 
0.3902088701725006, 0.9715606570243835, 0.6084423661231995, -0.8066854476928711, 0.3284202814102173, -0.5016489028930664, -0.4405231773853302, -0.42647552490234375, -0.46989667415618896, -0.8599265813827515, -0.5000250935554504, -0.2701258361339569, -0.6305871605873108, -0.1289079785346985, 1.000895380973816, 0.48725295066833496, -0.9372871518135071, -0.4308723211288452, -0.09419342130422592, 0.16162464022636414, -0.5889529585838318, -0.4148638844490051, 0.7316380143165588, -0.09005186706781387, -0.5197793245315552, 0.18557901680469513, -0.1365833431482315, 0.21479032933712006, 0.10904394835233688, -0.4369174540042877, -0.719231367111206, 0.047635868191719055, 0.46514904499053955, 0.3553444445133209, -0.7126516103744507, -0.7116261720657349, 0.29113441705703735, -0.5257644057273865, 0.4511777460575104, -0.009351805783808231, -0.4910290539264679, 0.031169019639492035, 0.7626912593841553, 0.4573306739330292, 0.6532716155052185, -0.013082392513751984, 0.039925891906023026, -0.6666544079780579, 0.171351820230484, -0.0021024488378316164, 0.2989504635334015, -0.07337380200624466, -0.30890050530433655, 0.8024612665176392, 0.6782243847846985, -0.5362308025360107, -1.0961591005325317, -0.4006692171096802, -1.465255618095398, -0.04138607159256935, 1.1862295866012573, -0.008244015276432037, -0.5022097229957581, 0.23412826657295227, -0.15363742411136627, 0.19395285844802856, -0.3593733608722687, 0.7682473659515381, 0.7700245976448059, -0.3821823000907898, 0.15096142888069153, -0.6220229268074036, 0.3576706349849701, 0.5159028768539429, -1.2135648727416992, -0.12234657257795334, 0.22800655663013458, 0.333911657333374, 0.3714616894721985, 0.6281638145446777, -0.09303077310323715, 0.25288814306259155, 0.23550738394260406, 0.042169902473688126, -0.004729255102574825, 0.0413132980465889, -0.20701773464679718, 0.12289891391992569, -0.26438310742378235, -0.47339174151420593 ]
open-llm-leaderboard/details_CoolWP__llama-2-13b-guanaco-fp16
open-llm-leaderboard
2023-08-27T12:41:46Z
201
0
[ "region:us" ]
null
2023-08-18T18:56:54Z
--- pretty_name: Evaluation run of CoolWP/llama-2-13b-guanaco-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [CoolWP/llama-2-13b-guanaco-fp16](https://huggingface.co/CoolWP/llama-2-13b-guanaco-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CoolWP__llama-2-13b-guanaco-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-17T18:49:30.894423](https://huggingface.co/datasets/open-llm-leaderboard/details_CoolWP__llama-2-13b-guanaco-fp16/blob/main/results_2023-08-17T18%3A49%3A30.894423.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5557402565625233,\n\ \ \"acc_stderr\": 0.03433097920024075,\n \"acc_norm\": 0.5600027152011281,\n\ \ \"acc_norm_stderr\": 0.03430992590405376,\n \"mc1\": 0.29865361077111385,\n\ \ \"mc1_stderr\": 0.016021570613768542,\n \"mc2\": 0.43400538092704843,\n\ \ \"mc2_stderr\": 0.014284105671223521\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.552901023890785,\n \"acc_stderr\": 0.014529380160526843,\n\ \ \"acc_norm\": 0.5955631399317406,\n \"acc_norm_stderr\": 0.014342036483436177\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.615116510655248,\n\ \ \"acc_stderr\": 0.004855733568540267,\n \"acc_norm\": 0.8239394542919737,\n\ \ \"acc_norm_stderr\": 0.003800932770597752\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n\ \ \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n\ \ \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5263157894736842,\n \"acc_stderr\": 0.04063302731486671,\n\ \ \"acc_norm\": 0.5263157894736842,\n \"acc_norm_stderr\": 0.04063302731486671\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n\ \ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6037735849056604,\n \"acc_stderr\": 0.030102793781791197,\n\ \ \"acc_norm\": 0.6037735849056604,\n \"acc_norm_stderr\": 0.030102793781791197\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5763888888888888,\n\ \ \"acc_stderr\": 0.0413212501972337,\n \"acc_norm\": 0.5763888888888888,\n\ \ \"acc_norm_stderr\": 
0.0413212501972337\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n\ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5144508670520231,\n\ \ \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.5144508670520231,\n\ \ \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171452,\n\ \ \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171452\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n\ \ \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.03227834510146268,\n\ \ \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.03227834510146268\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n\ \ \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n\ \ \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n\ \ \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3333333333333333,\n \"acc_stderr\": 0.0242785680243077,\n \"acc_norm\"\ : 0.3333333333333333,\n \"acc_norm_stderr\": 0.0242785680243077\n },\n\ \ \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n\ \ \"acc_stderr\": 0.04190596438871137,\n \"acc_norm\": 0.3253968253968254,\n\ \ \"acc_norm_stderr\": 0.04190596438871137\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6645161290322581,\n\ \ \"acc_stderr\": 0.02686020644472435,\n \"acc_norm\": 0.6645161290322581,\n\ \ \"acc_norm_stderr\": 0.02686020644472435\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n\ \ \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\"\ : 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.03681050869161551,\n\ \ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.03681050869161551\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6919191919191919,\n \"acc_stderr\": 0.032894773300986155,\n \"\ acc_norm\": 0.6919191919191919,\n \"acc_norm_stderr\": 0.032894773300986155\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8031088082901554,\n \"acc_stderr\": 0.028697873971860677,\n\ \ \"acc_norm\": 0.8031088082901554,\n \"acc_norm_stderr\": 0.028697873971860677\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5102564102564102,\n \"acc_stderr\": 0.025345672221942374,\n\ \ \"acc_norm\": 0.5102564102564102,\n \"acc_norm_stderr\": 0.025345672221942374\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.29259259259259257,\n \"acc_stderr\": 0.02773896963217609,\n \ \ \"acc_norm\": 0.29259259259259257,\n \"acc_norm_stderr\": 0.02773896963217609\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.032145368597886394,\n\ \ \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.032145368597886394\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31125827814569534,\n \"acc_stderr\": 0.037804458505267334,\n \"\ acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.037804458505267334\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7486238532110092,\n \"acc_stderr\": 0.018599206360287415,\n \"\ acc_norm\": 0.7486238532110092,\n \"acc_norm_stderr\": 0.018599206360287415\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4444444444444444,\n \"acc_stderr\": 0.03388857118502326,\n \"\ acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.03388857118502326\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7598039215686274,\n \"acc_stderr\": 0.02998373305591362,\n \"\ acc_norm\": 0.7598039215686274,\n \"acc_norm_stderr\": 0.02998373305591362\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.729957805907173,\n \"acc_stderr\": 0.028900721906293426,\n \ \ \"acc_norm\": 0.729957805907173,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n\ \ \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n\ \ \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6106870229007634,\n \"acc_stderr\": 0.04276486542814591,\n\ \ \"acc_norm\": 0.6106870229007634,\n \"acc_norm_stderr\": 0.04276486542814591\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908706,\n \"\ acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908706\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n\ \ \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n\ \ \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.03623089915724146,\n\ \ \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.03623089915724146\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n\ \ \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n\ \ \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n\ \ \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7991452991452992,\n\ \ 
\"acc_stderr\": 0.026246772946890474,\n \"acc_norm\": 0.7991452991452992,\n\ \ \"acc_norm_stderr\": 0.026246772946890474\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \ \ \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7586206896551724,\n\ \ \"acc_stderr\": 0.015302380123542108,\n \"acc_norm\": 0.7586206896551724,\n\ \ \"acc_norm_stderr\": 0.015302380123542108\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6329479768786127,\n \"acc_stderr\": 0.02595005433765408,\n\ \ \"acc_norm\": 0.6329479768786127,\n \"acc_norm_stderr\": 0.02595005433765408\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3553072625698324,\n\ \ \"acc_stderr\": 0.01600698993480319,\n \"acc_norm\": 0.3553072625698324,\n\ \ \"acc_norm_stderr\": 0.01600698993480319\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.027914055510468008,\n\ \ \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.027914055510468008\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6205787781350482,\n\ \ \"acc_stderr\": 0.027559949802347813,\n \"acc_norm\": 0.6205787781350482,\n\ \ \"acc_norm_stderr\": 0.027559949802347813\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6358024691358025,\n \"acc_stderr\": 0.026774929899722334,\n\ \ \"acc_norm\": 0.6358024691358025,\n \"acc_norm_stderr\": 0.026774929899722334\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3971631205673759,\n \"acc_stderr\": 0.0291898056735871,\n \ \ \"acc_norm\": 0.3971631205673759,\n \"acc_norm_stderr\": 0.0291898056735871\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41851368970013036,\n\ \ \"acc_stderr\": 0.012599505608336461,\n \"acc_norm\": 0.41851368970013036,\n\ \ \"acc_norm_stderr\": 0.012599505608336461\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.03032024326500413,\n\ \ \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.03032024326500413\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5408496732026143,\n \"acc_stderr\": 0.020160213617222516,\n \ \ \"acc_norm\": 0.5408496732026143,\n \"acc_norm_stderr\": 0.020160213617222516\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\ \ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\ \ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6081632653061224,\n \"acc_stderr\": 0.031251275910891656,\n\ \ \"acc_norm\": 0.6081632653061224,\n \"acc_norm_stderr\": 0.031251275910891656\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7412935323383084,\n\ \ \"acc_stderr\": 0.030965903123573026,\n \"acc_norm\": 0.7412935323383084,\n\ \ \"acc_norm_stderr\": 0.030965903123573026\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366255,\n \ \ \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366255\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.43373493975903615,\n\ \ \"acc_stderr\": 0.03858158940685517,\n \"acc_norm\": 0.43373493975903615,\n\ \ \"acc_norm_stderr\": 0.03858158940685517\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7602339181286549,\n 
\"acc_stderr\": 0.03274485211946956,\n\ \ \"acc_norm\": 0.7602339181286549,\n \"acc_norm_stderr\": 0.03274485211946956\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.29865361077111385,\n\ \ \"mc1_stderr\": 0.016021570613768542,\n \"mc2\": 0.43400538092704843,\n\ \ \"mc2_stderr\": 0.014284105671223521\n }\n}\n```" repo_url: https://huggingface.co/CoolWP/llama-2-13b-guanaco-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|arc:challenge|25_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hellaswag|10_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:49:30.894423.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:49:30.894423.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:49:30.894423.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T18:49:30.894423.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:49:30.894423.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:49:30.894423.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_17T18_49_30.894423 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T18:49:30.894423.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T18:49:30.894423.parquet' - config_name: results data_files: - split: 2023_08_17T18_49_30.894423 path: - results_2023-08-17T18:49:30.894423.parquet - split: latest path: - results_2023-08-17T18:49:30.894423.parquet --- # Dataset Card for Evaluation run of CoolWP/llama-2-13b-guanaco-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/CoolWP/llama-2-13b-guanaco-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [CoolWP/llama-2-13b-guanaco-fp16](https://huggingface.co/CoolWP/llama-2-13b-guanaco-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
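Because one configuration is generated per evaluated task, it can be convenient to enumerate the configuration names programmatically instead of copying them from the YAML header above. A minimal sketch, assuming the `datasets` library is installed (the repository id is the one used in the loading example below):

```python
from datasets import get_dataset_config_names

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names("open-llm-leaderboard/details_CoolWP__llama-2-13b-guanaco-fp16")
print(len(configs))   # the summary above says the dataset is composed of 61 configurations
print(configs[:5])    # a few of the harness_* configuration names
```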
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CoolWP__llama-2-13b-guanaco-fp16", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T18:49:30.894423](https://huggingface.co/datasets/open-llm-leaderboard/details_CoolWP__llama-2-13b-guanaco-fp16/blob/main/results_2023-08-17T18%3A49%3A30.894423.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5557402565625233, "acc_stderr": 0.03433097920024075, "acc_norm": 0.5600027152011281, "acc_norm_stderr": 0.03430992590405376, "mc1": 0.29865361077111385, "mc1_stderr": 0.016021570613768542, "mc2": 0.43400538092704843, "mc2_stderr": 0.014284105671223521 }, "harness|arc:challenge|25": { "acc": 0.552901023890785, "acc_stderr": 0.014529380160526843, "acc_norm": 0.5955631399317406, "acc_norm_stderr": 0.014342036483436177 }, "harness|hellaswag|10": { "acc": 0.615116510655248, "acc_stderr": 0.004855733568540267, "acc_norm": 0.8239394542919737, "acc_norm_stderr": 0.003800932770597752 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5263157894736842, "acc_stderr": 0.04063302731486671, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.04063302731486671 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6037735849056604, "acc_stderr": 0.030102793781791197, "acc_norm": 0.6037735849056604, "acc_norm_stderr": 0.030102793781791197 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5763888888888888, "acc_stderr": 0.0413212501972337, "acc_norm": 0.5763888888888888, "acc_norm_stderr": 0.0413212501972337 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5144508670520231, "acc_stderr": 0.03810871630454764, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171452, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171452 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146268, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, 
"acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.04190596438871137, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.04190596438871137 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6645161290322581, "acc_stderr": 0.02686020644472435, "acc_norm": 0.6645161290322581, "acc_norm_stderr": 0.02686020644472435 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03681050869161551, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03681050869161551 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6919191919191919, "acc_stderr": 0.032894773300986155, "acc_norm": 0.6919191919191919, "acc_norm_stderr": 0.032894773300986155 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8031088082901554, "acc_stderr": 0.028697873971860677, "acc_norm": 0.8031088082901554, "acc_norm_stderr": 0.028697873971860677 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5102564102564102, "acc_stderr": 0.025345672221942374, "acc_norm": 0.5102564102564102, "acc_norm_stderr": 0.025345672221942374 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.02773896963217609, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.02773896963217609 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5714285714285714, "acc_stderr": 0.032145368597886394, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.032145368597886394 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.037804458505267334, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.037804458505267334 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7486238532110092, "acc_stderr": 0.018599206360287415, "acc_norm": 0.7486238532110092, "acc_norm_stderr": 0.018599206360287415 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03388857118502326, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03388857118502326 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7598039215686274, "acc_stderr": 0.02998373305591362, "acc_norm": 0.7598039215686274, "acc_norm_stderr": 0.02998373305591362 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.729957805907173, "acc_stderr": 0.028900721906293426, "acc_norm": 0.729957805907173, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 
0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6106870229007634, "acc_stderr": 0.04276486542814591, "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.04276486542814591 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908706, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908706 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.03623089915724146, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.03623089915724146 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7991452991452992, "acc_stderr": 0.026246772946890474, "acc_norm": 0.7991452991452992, "acc_norm_stderr": 0.026246772946890474 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7586206896551724, "acc_stderr": 0.015302380123542108, "acc_norm": 0.7586206896551724, "acc_norm_stderr": 0.015302380123542108 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6329479768786127, "acc_stderr": 0.02595005433765408, "acc_norm": 0.6329479768786127, "acc_norm_stderr": 0.02595005433765408 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3553072625698324, "acc_stderr": 0.01600698993480319, "acc_norm": 0.3553072625698324, "acc_norm_stderr": 0.01600698993480319 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6111111111111112, "acc_stderr": 0.027914055510468008, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.027914055510468008 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6205787781350482, "acc_stderr": 0.027559949802347813, "acc_norm": 0.6205787781350482, "acc_norm_stderr": 0.027559949802347813 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6358024691358025, "acc_stderr": 0.026774929899722334, "acc_norm": 0.6358024691358025, "acc_norm_stderr": 0.026774929899722334 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3971631205673759, "acc_stderr": 0.0291898056735871, "acc_norm": 0.3971631205673759, "acc_norm_stderr": 0.0291898056735871 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41851368970013036, "acc_stderr": 0.012599505608336461, "acc_norm": 0.41851368970013036, "acc_norm_stderr": 0.012599505608336461 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03032024326500413, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03032024326500413 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5408496732026143, "acc_stderr": 0.020160213617222516, "acc_norm": 0.5408496732026143, "acc_norm_stderr": 0.020160213617222516 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.6081632653061224, "acc_stderr": 0.031251275910891656, "acc_norm": 0.6081632653061224, "acc_norm_stderr": 0.031251275910891656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7412935323383084, "acc_stderr": 0.030965903123573026, "acc_norm": 0.7412935323383084, "acc_norm_stderr": 0.030965903123573026 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.039427724440366255, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366255 }, "harness|hendrycksTest-virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.03858158940685517, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.03858158940685517 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.03274485211946956, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.03274485211946956 }, "harness|truthfulqa:mc|0": { "mc1": 0.29865361077111385, "mc1_stderr": 0.016021570613768542, "mc2": 0.43400538092704843, "mc2_stderr": 0.014284105671223521 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7151070833206177, -0.8577357530593872, 0.28272807598114014, 0.22101090848445892, -0.1644308716058731, -0.03920479491353035, 0.0071110851131379604, -0.2556341886520386, 0.6041421890258789, -0.074427530169487, -0.48434415459632874, -0.7022051215171814, -0.4386819005012512, 0.22539402544498444, -0.04401274397969246, 0.8495199680328369, -0.17448706924915314, -0.17750857770442963, 0.0650513768196106, -0.025662437081336975, -0.23694217205047607, -0.32843613624572754, -0.4527266323566437, -0.37483474612236023, 0.17698383331298828, 0.4463672339916229, 0.4717870354652405, 0.8144222497940063, 0.689191460609436, 0.2977955937385559, -0.3229374885559082, -0.015549101866781712, -0.16961635649204254, -0.27960166335105896, 0.38607487082481384, -0.34212857484817505, -0.8417455554008484, 0.3115508556365967, 0.7008757591247559, 0.6042180061340332, -0.10868457704782486, 0.2877504527568817, 0.021480441093444824, 0.5522105693817139, -0.35316020250320435, 0.04436147212982178, -0.30499595403671265, 0.24135027825832367, -0.2001616209745407, -0.2635132074356079, -0.28333643078804016, -0.23645859956741333, -0.1304563283920288, -0.8713099360466003, 0.2804555296897888, 0.30769723653793335, 1.602683424949646, -0.12669065594673157, -0.2413627952337265, 0.10455381870269775, -0.08864247798919678, 1.0003907680511475, -0.8953753113746643, 0.32160040736198425, 0.7763673663139343, 0.0797962099313736, -0.20430949330329895, -0.6089457273483276, -0.6369830965995789, 0.06541643291711807, -0.3684457540512085, 0.325400710105896, -0.10823347419500351, -0.17445430159568787, 0.3661511242389679, 0.6884984374046326, -0.6425356864929199, 0.21199409663677216, -0.6157661080360413, -0.17035949230194092, 1.064997911453247, 0.3627239763736725, 0.0650574192404747, -0.3621402978897095, -0.7044435143470764, -0.6448753476142883, -0.3896125257015228, 0.2582060992717743, 0.4360021948814392, 0.32351750135421753, -0.44196122884750366, 0.6851496696472168, -0.4411974549293518, 0.5565015077590942, 0.40348801016807556, 0.004677358083426952, 0.9039700031280518, -0.630955159664154, -0.5373254418373108, -0.11027908325195312, 1.1274162530899048, 0.5624645948410034, 0.057057857513427734, 0.20713558793067932, 0.02917511761188507, -0.1020287275314331, -0.0018976691644638777, -0.8412250280380249, -0.2977324426174164, 0.17289260029792786, -0.3908144235610962, -0.5263470411300659, 0.3451750576496124, -0.8878135681152344, 0.15290406346321106, -0.033877547830343246, 0.3966701030731201, -0.4442528486251831, -0.10506615787744522, 0.23244985938072205, -0.4260377585887909, 0.8299389481544495, -0.1590319275856018, -0.7874617576599121, 0.3833271265029907, 0.5229636430740356, 0.7693853378295898, -0.12927503883838654, -0.4563210904598236, -0.1071285530924797, -0.11310382932424545, -0.2670772671699524, 0.5712404251098633, -0.2690122127532959, -0.4118293225765228, -0.2948340177536011, 0.30983826518058777, -0.23299746215343475, -0.32557815313339233, 0.7252910137176514, -0.19950485229492188, 0.21085375547409058, -0.4273352324962616, -0.6144503355026245, 0.13715282082557678, 0.37156417965888977, -0.39458155632019043, 1.304854393005371, 0.24622273445129395, -0.8356758952140808, 0.38373786211013794, -0.5619726777076721, -0.17751961946487427, -0.017792129889130592, -0.03920198604464531, -0.7707549929618835, -0.2544780969619751, 0.16288244724273682, 0.4334237575531006, -0.18525119125843048, -0.12569983303546906, -0.37894803285598755, -0.369174599647522, 0.354798823595047, -0.15859226882457733, 1.2174618244171143, -0.013119657523930073, -0.7779906392097473, -0.09777339547872543, 
-1.2399406433105469, 0.34022414684295654, 0.22602856159210205, -0.38204875588417053, -0.19806768000125885, -0.4899163246154785, -0.014216573908925056, 0.19792640209197998, 0.31860262155532837, -0.7973712682723999, 0.3066968321800232, -0.323556512594223, 0.1747044324874878, 1.2651363611221313, 0.02142280898988247, 0.12631312012672424, -0.5187608599662781, 0.5430212020874023, 0.16397982835769653, 0.20378193259239197, 0.40043017268180847, -0.633575439453125, -0.8017904162406921, -0.5048699378967285, -0.08347751945257187, 0.631255030632019, -0.1757425218820572, 1.1357358694076538, 0.10541228204965591, -0.9321785569190979, -0.42447054386138916, -0.11589986830949783, 0.5129894614219666, 0.8138456344604492, 0.5854730010032654, -0.06067673861980438, -0.6134045720100403, -1.111757516860962, -0.27645257115364075, -0.18679910898208618, 0.12551096081733704, 0.2135353982448578, 1.041287899017334, -0.28958266973495483, 0.5670508742332458, -1.0864863395690918, -0.1996176391839981, 0.18363986909389496, -0.07441431283950806, 0.807751476764679, 0.7211069464683533, 0.5826267600059509, -0.644231915473938, -0.5137491822242737, 0.19953987002372742, -0.8845460414886475, -0.09749080240726471, 0.16904018819332123, -0.3358946144580841, 0.10362334549427032, 0.1319543570280075, -0.7126464247703552, 0.5382386445999146, 0.23878993093967438, -1.0843876600265503, 1.052664041519165, -0.3552052974700928, 0.580321192741394, -0.9721362590789795, 0.18672826886177063, -0.05575936287641525, 0.05386300012469292, -0.5016516447067261, 0.06296946853399277, 0.07807479798793793, 0.446733295917511, -0.4936327636241913, 0.7807849645614624, -0.6462743282318115, -0.042895831167697906, 0.4273894727230072, 0.10025955736637115, -0.09692789614200592, 0.37476637959480286, -0.2341366708278656, 0.7619092464447021, 0.7404770851135254, -0.43563899397850037, 0.5426798462867737, 0.4050656259059906, -0.21417023241519928, 0.7003248929977417, -0.5011541843414307, -0.25785502791404724, 0.30765217542648315, -0.036992501467466354, -0.8410115838050842, -0.516269326210022, 0.02821374498307705, -0.5669913291931152, -0.13349388539791107, 0.365791916847229, -0.25461599230766296, -0.8291102051734924, -0.9400703310966492, 0.3115130364894867, 0.694743275642395, -0.42636916041374207, -0.2177339792251587, 0.0842842161655426, 0.11290281265974045, -0.844160258769989, -0.8599401712417603, -0.4771319627761841, -0.22677652537822723, -0.6880902647972107, 0.33581218123435974, -0.2602088153362274, -0.3014332354068756, -0.08105668425559998, -0.20993581414222717, -0.34022992849349976, 0.002875993028283119, 0.158858984708786, 0.6809889674186707, -0.3748657703399658, -0.32159116864204407, -0.26255717873573303, -0.15351641178131104, 0.23156686127185822, -0.08066152781248093, 0.3771515488624573, -0.48570629954338074, -0.3595423400402069, -0.4495287537574768, -0.016375964507460594, 0.7195590138435364, -0.08622847497463226, 0.7158876061439514, 0.44664084911346436, -0.31509432196617126, -0.0008126945467665792, -0.29745325446128845, -0.2567882239818573, -0.5824806094169617, 0.2621982991695404, -0.46588072180747986, -1.0244709253311157, 0.8017652034759521, 0.5649607181549072, 0.066914863884449, 1.126811146736145, 0.6175134181976318, -0.3022556006908417, 0.9887707829475403, 0.06889967620372772, 0.3187599778175354, 0.3967636525630951, -0.6873539686203003, 0.09351902455091476, -0.883354663848877, -0.3663997948169708, -0.609838604927063, -0.47168514132499695, -0.7331451773643494, -0.07799281179904938, 0.24614593386650085, 0.12856917083263397, -0.6751685738563538, 0.5901976227760315, 
-0.847411572933197, 0.5800072550773621, 0.5491170287132263, 0.252490371465683, 0.1751091480255127, -0.1513591855764389, -0.3694742023944855, -0.13392019271850586, -0.43628454208374023, -0.22447039186954498, 1.1990716457366943, 0.27233409881591797, 0.7428697347640991, 0.06848158687353134, 0.8737679123878479, 0.13281114399433136, -0.09611966460943222, -0.6093539595603943, 0.6433973908424377, 0.1015772819519043, -0.7789697051048279, -0.3926886320114136, -0.5048242807388306, -1.0580397844314575, 0.4158749580383301, -0.12425152212381363, -0.8400391936302185, 0.1447785645723343, 0.024883894249796867, -0.16134890913963318, 0.48211851716041565, -0.544510543346405, 0.8015349507331848, -0.1370408535003662, -0.4763348400592804, 0.07481572777032852, -0.8112478256225586, 0.48164045810699463, 0.2153213620185852, 0.2664293646812439, 0.0076581803150475025, 0.23607350885868073, 1.205665111541748, -0.8450852632522583, 0.4032730460166931, 0.07369783520698547, 0.00037188600981608033, 0.3498086929321289, -0.18943947553634644, 0.5227301120758057, 0.09409867972135544, -0.02526361681520939, -0.08295872062444687, 0.26185518503189087, -0.8658881783485413, -0.03730078041553497, 0.9564022421836853, -0.9498053193092346, -0.6138759255409241, -0.8972784876823425, -0.5489699840545654, 0.07576987892389297, 0.5601825714111328, 0.3865358531475067, 0.4867441952228546, 0.027497796341776848, 0.43726032972335815, 0.8384113311767578, -0.12300948798656464, 0.6126184463500977, 0.23832789063453674, 0.11401862651109695, -0.6489558219909668, 0.8077059388160706, 0.06034404784440994, 0.34075772762298584, 0.2582624554634094, 0.41063159704208374, -0.5026537179946899, -0.18813206255435944, -0.1845758706331253, 0.5013611912727356, -0.6337230205535889, -0.2829524874687195, -0.37836411595344543, -0.3512613773345947, -0.7454354166984558, -0.6312522292137146, -0.3217949867248535, -0.5246908068656921, -0.48177370429039, -0.4919533133506775, 0.6152291893959045, 0.4757300615310669, -0.40910568833351135, 0.05129888281226158, -0.43530410528182983, 0.28112587332725525, 0.35270416736602783, 0.5029131770133972, -0.3792904019355774, -0.563920795917511, 0.05484387278556824, -0.15364797413349152, -0.5542497634887695, -0.9502133131027222, 0.3434186279773712, -0.05023365095257759, 0.49978935718536377, 0.5887678861618042, 0.05984458699822426, 0.850787878036499, -0.19473254680633545, 1.0470136404037476, 0.3324786424636841, -0.7885523438453674, 0.7595430612564087, -0.3269462287425995, 0.17433351278305054, 0.6338765025138855, 0.18563124537467957, -0.18781134486198425, -0.6848841905593872, -1.3096551895141602, -0.831508219242096, 0.6304309368133545, 0.3510302007198334, -0.23803886771202087, 0.04001893103122711, 0.16161669790744781, -0.2849794626235962, -0.1803622990846634, -0.6695626974105835, -0.8817944526672363, -0.1461397260427475, -0.4630543291568756, 0.14369583129882812, 0.05103668197989464, -0.4040374755859375, -0.8281872272491455, 0.9145710468292236, -0.00021047865448053926, 0.5802550315856934, 0.45516613125801086, 0.08875700831413269, 0.0860959142446518, 0.48910683393478394, 0.910169780254364, 0.7371919751167297, -0.46589362621307373, 0.42072606086730957, 0.4145074188709259, -1.081113576889038, 0.47714513540267944, 0.3046184182167053, -0.05866392329335213, -0.023066869005560875, 0.48377639055252075, 0.40726685523986816, 0.046944987028837204, -0.21892215311527252, 0.6288589835166931, -0.021692831069231033, -0.5159540772438049, -0.36962300539016724, 0.1080302894115448, -0.13128730654716492, -0.01786157488822937, 0.41432639956474304, -0.1514066904783249, 
-0.05279753729701042, -0.5170282125473022, 0.4591667056083679, 0.36855632066726685, -0.4601362347602844, -0.15985193848609924, 0.7337979674339294, -0.1884511262178421, -0.13425055146217346, 0.34184759855270386, -0.17650404572486877, -0.6095411777496338, 1.1471951007843018, 0.6168045997619629, 0.6605668067932129, -0.2463105171918869, -0.05762970447540283, 0.882684051990509, 0.3671211004257202, -0.045134998857975006, 0.5385411977767944, 0.30269932746887207, -0.23249754309654236, 0.17594994604587555, -0.8502824306488037, -0.07071925699710846, 0.17671163380146027, -0.8526899814605713, 0.3285907804965973, -0.5312561988830566, -0.1997605860233307, -0.010105614550411701, 0.41975799202919006, -0.49299556016921997, 0.5553352236747742, -0.4255902171134949, 1.2091630697250366, -1.0036325454711914, 0.7314796447753906, 0.7433637976646423, -0.5244966745376587, -1.0119123458862305, -0.5235986709594727, 0.03131094574928284, -0.817898690700531, 0.5540956258773804, -0.06764978915452957, 0.14183469116687775, -0.08087722957134247, -0.7313370704650879, -0.9173421859741211, 1.391841173171997, -0.0390775240957737, -0.45587167143821716, 0.20203816890716553, -0.07788034528493881, 0.4675813317298889, 0.1490837186574936, 0.5769003629684448, 0.7538992166519165, 0.8119014501571655, -0.12984128296375275, -0.7392503619194031, 0.3619265556335449, -0.4963931143283844, -0.33672332763671875, 0.4313197433948517, -0.9351446032524109, 1.2003306150436401, -0.040414340794086456, 0.22696733474731445, -0.17024360597133636, 0.6780752539634705, 0.7962931990623474, 0.28407877683639526, 0.3481885492801666, 0.9194775223731995, 0.8682937622070312, -0.5116733312606812, 1.0110324621200562, -0.17476573586463928, 0.8703685402870178, 0.6746298670768738, 0.23072844743728638, 0.7926495671272278, 0.6967149972915649, -0.5649691820144653, 0.5900235772132874, 0.7892130017280579, -0.31456276774406433, 0.41388586163520813, 0.28124499320983887, -0.13064952194690704, -0.10635700821876526, 0.3833586573600769, -0.8911851644515991, 0.15406136214733124, 0.07139217108488083, -0.32849177718162537, 0.1070159450173378, -0.44188833236694336, 0.3638226389884949, -0.07346273958683014, -0.01810748502612114, 0.33532100915908813, 0.0491025485098362, -0.425801157951355, 0.9368120431900024, -0.1295507401227951, 0.807932436466217, -0.5435149073600769, -0.06846556812524796, -0.407845675945282, 0.5835885405540466, -0.4834548234939575, -1.0491584539413452, 0.15110111236572266, 0.11229326575994492, -0.1365773230791092, -0.1632053405046463, 0.7017122507095337, -0.16601331532001495, -0.7293254137039185, 0.14492899179458618, 0.062398701906204224, 0.09727305173873901, 0.5310260057449341, -0.6624945402145386, -0.31393933296203613, -0.05192295089364052, -0.5721151232719421, 0.1260460764169693, 0.27728578448295593, 0.25466710329055786, 0.5430478453636169, 0.6542618274688721, 0.14707720279693604, 0.37457093596458435, -0.5350408554077148, 0.784106433391571, -1.059281826019287, -0.7265215516090393, -0.9080483317375183, 0.4503827691078186, -0.314239501953125, -0.9012806415557861, 0.9855420589447021, 1.0139241218566895, 0.8747132420539856, -0.007875630632042885, 0.6213090419769287, -0.4230394661426544, 0.2076970934867859, -0.37508806586265564, 0.9530319571495056, -0.8365015387535095, -0.21101178228855133, -0.27801233530044556, -0.714128851890564, -0.3635942339897156, 0.8224290013313293, -0.1405763477087021, -0.0026519224047660828, 1.0453295707702637, 0.6518213152885437, -0.10889063775539398, 0.028895027935504913, -0.08530184626579285, 0.5681948065757751, 0.39689555764198303, 
0.991832435131073, 0.6476365923881531, -0.774070143699646, 0.3452248275279999, -0.5633355975151062, -0.4366750121116638, -0.38737592101097107, -0.49454522132873535, -0.8396832346916199, -0.49507981538772583, -0.24416270852088928, -0.6315995454788208, -0.12918899953365326, 0.9841104745864868, 0.43614593148231506, -0.8987671732902527, -0.4227748513221741, -0.0929844006896019, 0.15776288509368896, -0.5757084488868713, -0.4204573631286621, 0.7346739768981934, -0.08871541172266006, -0.5488755106925964, 0.22853179275989532, -0.14928777515888214, 0.20137141644954681, 0.10683522373437881, -0.39546000957489014, -0.7147054672241211, 0.011896979063749313, 0.44287070631980896, 0.37781527638435364, -0.6984083652496338, -0.7088010311126709, 0.3091183304786682, -0.5397433638572693, 0.41998612880706787, -0.012224692851305008, -0.5394171476364136, 0.005819415673613548, 0.734135627746582, 0.4550926387310028, 0.6794514060020447, -0.05868801474571228, 0.06279104948043823, -0.6399518847465515, 0.23002998530864716, -0.03673936799168587, 0.2776673436164856, -0.03513975068926811, -0.3347117304801941, 0.7986213564872742, 0.6802520155906677, -0.5335997939109802, -1.0841478109359741, -0.4226788878440857, -1.4333654642105103, -0.02341453917324543, 1.1068886518478394, -0.012008290737867355, -0.47985246777534485, 0.2642262578010559, -0.11815108358860016, 0.17517001926898956, -0.3069009482860565, 0.7713347673416138, 0.7849178314208984, -0.3965785801410675, 0.16329456865787506, -0.6488111615180969, 0.3554478585720062, 0.5242963433265686, -1.228468894958496, -0.0931973084807396, 0.23796896636486053, 0.3290063142776489, 0.33962151408195496, 0.6124796271324158, -0.10396811366081238, 0.2608376145362854, 0.2138112485408783, 0.040860481560230255, -0.005355786997824907, 0.07499586790800095, -0.23047807812690735, 0.08063223212957382, -0.2577505111694336, -0.43739229440689087 ]
open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-orca-40k
open-llm-leaderboard
2023-08-27T12:41:55Z
201
0
[ "region:us" ]
null
2023-08-18T18:57:47Z
--- pretty_name: Evaluation run of yihan6324/llama2-7b-instructmining-orca-40k dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yihan6324/llama2-7b-instructmining-orca-40k](https://huggingface.co/yihan6324/llama2-7b-instructmining-orca-40k)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-orca-40k\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-18T00:53:27.654117](https://huggingface.co/datasets/open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-orca-40k/blob/main/results_2023-08-18T00%3A53%3A27.654117.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4847120233306423,\n\ \ \"acc_stderr\": 0.03527399847085323,\n \"acc_norm\": 0.4884455010512822,\n\ \ \"acc_norm_stderr\": 0.035257414280301984,\n \"mc1\": 0.36107711138310894,\n\ \ \"mc1_stderr\": 0.016814312844836882,\n \"mc2\": 0.5103220670450638,\n\ \ \"mc2_stderr\": 0.015890639542177364\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5298634812286689,\n \"acc_stderr\": 0.014585305840007105,\n\ \ \"acc_norm\": 0.5674061433447098,\n \"acc_norm_stderr\": 0.014478005694182524\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6196972714598685,\n\ \ \"acc_stderr\": 0.004844690404713595,\n \"acc_norm\": 0.8024297948615814,\n\ \ \"acc_norm_stderr\": 0.0039735233080143454\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n\ \ \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.4740740740740741,\n\ \ \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.48026315789473684,\n \"acc_stderr\": 0.040657710025626036,\n\ \ \"acc_norm\": 0.48026315789473684,\n \"acc_norm_stderr\": 0.040657710025626036\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.48,\n\ \ \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \ \ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5547169811320755,\n \"acc_stderr\": 0.030588052974270655,\n\ \ \"acc_norm\": 0.5547169811320755,\n \"acc_norm_stderr\": 0.030588052974270655\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4583333333333333,\n\ \ \"acc_stderr\": 0.04166666666666665,\n 
\"acc_norm\": 0.4583333333333333,\n\ \ \"acc_norm_stderr\": 0.04166666666666665\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.39,\n\ \ \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \ \ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.43352601156069365,\n\ \ \"acc_stderr\": 0.03778621079092055,\n \"acc_norm\": 0.43352601156069365,\n\ \ \"acc_norm_stderr\": 0.03778621079092055\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.044405219061793275,\n\ \ \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.044405219061793275\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n\ \ \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.40425531914893614,\n \"acc_stderr\": 0.03208115750788684,\n\ \ \"acc_norm\": 0.40425531914893614,\n \"acc_norm_stderr\": 0.03208115750788684\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4206896551724138,\n \"acc_stderr\": 0.0411391498118926,\n\ \ \"acc_norm\": 0.4206896551724138,\n \"acc_norm_stderr\": 0.0411391498118926\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.291005291005291,\n \"acc_stderr\": 0.023393826500484865,\n \"\ acc_norm\": 0.291005291005291,\n \"acc_norm_stderr\": 0.023393826500484865\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n\ \ \"acc_stderr\": 0.03809523809523811,\n \"acc_norm\": 0.23809523809523808,\n\ \ \"acc_norm_stderr\": 0.03809523809523811\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.5645161290322581,\n \"acc_stderr\": 0.028206225591502737,\n \"\ acc_norm\": 0.5645161290322581,\n \"acc_norm_stderr\": 0.028206225591502737\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.39408866995073893,\n \"acc_stderr\": 0.03438157967036543,\n \"\ acc_norm\": 0.39408866995073893,\n \"acc_norm_stderr\": 0.03438157967036543\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\"\ : 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6121212121212121,\n \"acc_stderr\": 0.03804913653971011,\n\ \ \"acc_norm\": 0.6121212121212121,\n \"acc_norm_stderr\": 0.03804913653971011\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6212121212121212,\n \"acc_stderr\": 0.03456088731993747,\n \"\ acc_norm\": 0.6212121212121212,\n \"acc_norm_stderr\": 
0.03456088731993747\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.6528497409326425,\n \"acc_stderr\": 0.03435696168361355,\n\ \ \"acc_norm\": 0.6528497409326425,\n \"acc_norm_stderr\": 0.03435696168361355\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.46153846153846156,\n \"acc_stderr\": 0.025275892070240634,\n\ \ \"acc_norm\": 0.46153846153846156,\n \"acc_norm_stderr\": 0.025275892070240634\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.24814814814814815,\n \"acc_stderr\": 0.026335739404055803,\n \ \ \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.026335739404055803\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.4831932773109244,\n \"acc_stderr\": 0.03246013680375308,\n \ \ \"acc_norm\": 0.4831932773109244,\n \"acc_norm_stderr\": 0.03246013680375308\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2847682119205298,\n \"acc_stderr\": 0.03684881521389023,\n \"\ acc_norm\": 0.2847682119205298,\n \"acc_norm_stderr\": 0.03684881521389023\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.6623853211009174,\n \"acc_stderr\": 0.02027526598663891,\n \"\ acc_norm\": 0.6623853211009174,\n \"acc_norm_stderr\": 0.02027526598663891\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4351851851851852,\n \"acc_stderr\": 0.03381200005643525,\n \"\ acc_norm\": 0.4351851851851852,\n \"acc_norm_stderr\": 0.03381200005643525\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.6372549019607843,\n \"acc_stderr\": 0.03374499356319355,\n \"\ acc_norm\": 0.6372549019607843,\n \"acc_norm_stderr\": 0.03374499356319355\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.6582278481012658,\n \"acc_stderr\": 0.03087453753755362,\n \ \ \"acc_norm\": 0.6582278481012658,\n \"acc_norm_stderr\": 0.03087453753755362\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.515695067264574,\n\ \ \"acc_stderr\": 0.0335412657542081,\n \"acc_norm\": 0.515695067264574,\n\ \ \"acc_norm_stderr\": 0.0335412657542081\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6030534351145038,\n \"acc_stderr\": 0.04291135671009225,\n\ \ \"acc_norm\": 0.6030534351145038,\n \"acc_norm_stderr\": 0.04291135671009225\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6363636363636364,\n \"acc_stderr\": 0.043913262867240704,\n \"\ acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.043913262867240704\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5092592592592593,\n\ \ \"acc_stderr\": 0.04832853553437055,\n \"acc_norm\": 0.5092592592592593,\n\ \ \"acc_norm_stderr\": 0.04832853553437055\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.48466257668711654,\n \"acc_stderr\": 0.039265223787088424,\n\ \ \"acc_norm\": 0.48466257668711654,\n \"acc_norm_stderr\": 0.039265223787088424\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.35714285714285715,\n\ \ \"acc_stderr\": 0.04547960999764376,\n \"acc_norm\": 0.35714285714285715,\n\ \ \"acc_norm_stderr\": 0.04547960999764376\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6310679611650486,\n \"acc_stderr\": 0.0477761518115674,\n\ \ \"acc_norm\": 0.6310679611650486,\n \"acc_norm_stderr\": 0.0477761518115674\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 
0.6752136752136753,\n\ \ \"acc_stderr\": 0.03067902276549883,\n \"acc_norm\": 0.6752136752136753,\n\ \ \"acc_norm_stderr\": 0.03067902276549883\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6704980842911877,\n\ \ \"acc_stderr\": 0.016808322261740467,\n \"acc_norm\": 0.6704980842911877,\n\ \ \"acc_norm_stderr\": 0.016808322261740467\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.47109826589595377,\n \"acc_stderr\": 0.02687408588351835,\n\ \ \"acc_norm\": 0.47109826589595377,\n \"acc_norm_stderr\": 0.02687408588351835\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25027932960893856,\n\ \ \"acc_stderr\": 0.014487500852850407,\n \"acc_norm\": 0.25027932960893856,\n\ \ \"acc_norm_stderr\": 0.014487500852850407\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5522875816993464,\n \"acc_stderr\": 0.02847293847803353,\n\ \ \"acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.02847293847803353\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5755627009646302,\n\ \ \"acc_stderr\": 0.028071928247946208,\n \"acc_norm\": 0.5755627009646302,\n\ \ \"acc_norm_stderr\": 0.028071928247946208\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5308641975308642,\n \"acc_stderr\": 0.02776768960683393,\n\ \ \"acc_norm\": 0.5308641975308642,\n \"acc_norm_stderr\": 0.02776768960683393\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3404255319148936,\n \"acc_stderr\": 0.028267657482650154,\n \ \ \"acc_norm\": 0.3404255319148936,\n \"acc_norm_stderr\": 0.028267657482650154\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.38722294654498046,\n\ \ \"acc_stderr\": 0.012441155326854924,\n \"acc_norm\": 0.38722294654498046,\n\ \ \"acc_norm_stderr\": 0.012441155326854924\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.47794117647058826,\n \"acc_stderr\": 0.030343264224213528,\n\ \ \"acc_norm\": 0.47794117647058826,\n \"acc_norm_stderr\": 0.030343264224213528\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.4395424836601307,\n \"acc_stderr\": 0.02007942040808792,\n \ \ \"acc_norm\": 0.4395424836601307,\n \"acc_norm_stderr\": 0.02007942040808792\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5454545454545454,\n\ \ \"acc_stderr\": 0.04769300568972744,\n \"acc_norm\": 0.5454545454545454,\n\ \ \"acc_norm_stderr\": 0.04769300568972744\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6081632653061224,\n \"acc_stderr\": 0.03125127591089165,\n\ \ \"acc_norm\": 0.6081632653061224,\n \"acc_norm_stderr\": 0.03125127591089165\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6766169154228856,\n\ \ \"acc_stderr\": 0.03307615947979034,\n \"acc_norm\": 0.6766169154228856,\n\ \ \"acc_norm_stderr\": 0.03307615947979034\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \ \ \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39156626506024095,\n\ \ \"acc_stderr\": 0.03799857454479637,\n \"acc_norm\": 0.39156626506024095,\n\ \ \"acc_norm_stderr\": 0.03799857454479637\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 
0.6666666666666666,\n \"acc_stderr\": 0.03615507630310935,\n\ \ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.03615507630310935\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36107711138310894,\n\ \ \"mc1_stderr\": 0.016814312844836882,\n \"mc2\": 0.5103220670450638,\n\ \ \"mc2_stderr\": 0.015890639542177364\n }\n}\n```" repo_url: https://huggingface.co/yihan6324/llama2-7b-instructmining-orca-40k leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|arc:challenge|25_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hellaswag|10_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:53:27.654117.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:53:27.654117.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:53:27.654117.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:53:27.654117.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:53:27.654117.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:53:27.654117.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:53:27.654117.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_18T00_53_27.654117 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:53:27.654117.parquet'
  - split: latest
    path:
    - '**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:53:27.654117.parquet'
- config_name: harness_hendrycksTest_security_studies_5
  data_files:
  - split: 2023_08_18T00_53_27.654117
    path:
    - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:53:27.654117.parquet'
  - split: latest
    path:
    - '**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:53:27.654117.parquet'
- config_name: harness_hendrycksTest_sociology_5
  data_files:
  - split: 2023_08_18T00_53_27.654117
    path:
    - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:53:27.654117.parquet'
  - split: latest
    path:
    - '**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:53:27.654117.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
  data_files:
  - split: 2023_08_18T00_53_27.654117
    path:
    - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:53:27.654117.parquet'
  - split: latest
    path:
    - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:53:27.654117.parquet'
- config_name: harness_hendrycksTest_virology_5
  data_files:
  - split: 2023_08_18T00_53_27.654117
    path:
    - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:53:27.654117.parquet'
  - split: latest
    path:
    - '**/details_harness|hendrycksTest-virology|5_2023-08-18T00:53:27.654117.parquet'
- config_name: harness_hendrycksTest_world_religions_5
  data_files:
  - split: 2023_08_18T00_53_27.654117
    path:
    - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:53:27.654117.parquet'
  - split: latest
    path:
    - '**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:53:27.654117.parquet'
- config_name: harness_truthfulqa_mc_0
  data_files:
  - split: 2023_08_18T00_53_27.654117
    path:
    - '**/details_harness|truthfulqa:mc|0_2023-08-18T00:53:27.654117.parquet'
  - split: latest
    path:
    - '**/details_harness|truthfulqa:mc|0_2023-08-18T00:53:27.654117.parquet'
- config_name: results
  data_files:
  - split: 2023_08_18T00_53_27.654117
    path:
    - results_2023-08-18T00:53:27.654117.parquet
  - split: latest
    path:
    - results_2023-08-18T00:53:27.654117.parquet
---

# Dataset Card for Evaluation run of yihan6324/llama2-7b-instructmining-orca-40k

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/yihan6324/llama2-7b-instructmining-orca-40k
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [yihan6324/llama2-7b-instructmining-orca-40k](https://huggingface.co/yihan6324/llama2-7b-instructmining-orca-40k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
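As a quick illustration of that "results" configuration (a minimal sketch, assuming the `datasets` library is installed and using the config and split names listed in this card), the aggregated metrics can be read directly:

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; the "latest" split always
# points to the most recent evaluation run recorded in this card.
results = load_dataset(
    "open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-orca-40k",
    "results",
    split="latest",
)

# The stored row(s) hold the aggregated metrics of the run.
print(results[0])
```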
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-orca-40k", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-18T00:53:27.654117](https://huggingface.co/datasets/open-llm-leaderboard/details_yihan6324__llama2-7b-instructmining-orca-40k/blob/main/results_2023-08-18T00%3A53%3A27.654117.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4847120233306423, "acc_stderr": 0.03527399847085323, "acc_norm": 0.4884455010512822, "acc_norm_stderr": 0.035257414280301984, "mc1": 0.36107711138310894, "mc1_stderr": 0.016814312844836882, "mc2": 0.5103220670450638, "mc2_stderr": 0.015890639542177364 }, "harness|arc:challenge|25": { "acc": 0.5298634812286689, "acc_stderr": 0.014585305840007105, "acc_norm": 0.5674061433447098, "acc_norm_stderr": 0.014478005694182524 }, "harness|hellaswag|10": { "acc": 0.6196972714598685, "acc_stderr": 0.004844690404713595, "acc_norm": 0.8024297948615814, "acc_norm_stderr": 0.0039735233080143454 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750575, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.48026315789473684, "acc_stderr": 0.040657710025626036, "acc_norm": 0.48026315789473684, "acc_norm_stderr": 0.040657710025626036 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5547169811320755, "acc_stderr": 0.030588052974270655, "acc_norm": 0.5547169811320755, "acc_norm_stderr": 0.030588052974270655 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4583333333333333, "acc_stderr": 0.04166666666666665, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.04166666666666665 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.43352601156069365, "acc_stderr": 0.03778621079092055, "acc_norm": 0.43352601156069365, "acc_norm_stderr": 0.03778621079092055 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684 }, "harness|hendrycksTest-econometrics|5": { 
"acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4206896551724138, "acc_stderr": 0.0411391498118926, "acc_norm": 0.4206896551724138, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.291005291005291, "acc_stderr": 0.023393826500484865, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484865 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523811, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523811 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5645161290322581, "acc_stderr": 0.028206225591502737, "acc_norm": 0.5645161290322581, "acc_norm_stderr": 0.028206225591502737 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.03438157967036543, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.03438157967036543 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6121212121212121, "acc_stderr": 0.03804913653971011, "acc_norm": 0.6121212121212121, "acc_norm_stderr": 0.03804913653971011 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6212121212121212, "acc_stderr": 0.03456088731993747, "acc_norm": 0.6212121212121212, "acc_norm_stderr": 0.03456088731993747 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6528497409326425, "acc_stderr": 0.03435696168361355, "acc_norm": 0.6528497409326425, "acc_norm_stderr": 0.03435696168361355 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.46153846153846156, "acc_stderr": 0.025275892070240634, "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.025275892070240634 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.026335739404055803, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.026335739404055803 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4831932773109244, "acc_stderr": 0.03246013680375308, "acc_norm": 0.4831932773109244, "acc_norm_stderr": 0.03246013680375308 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6623853211009174, "acc_stderr": 0.02027526598663891, "acc_norm": 0.6623853211009174, "acc_norm_stderr": 0.02027526598663891 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4351851851851852, "acc_stderr": 0.03381200005643525, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.03381200005643525 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6372549019607843, "acc_stderr": 0.03374499356319355, "acc_norm": 0.6372549019607843, "acc_norm_stderr": 0.03374499356319355 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6582278481012658, "acc_stderr": 0.03087453753755362, "acc_norm": 0.6582278481012658, "acc_norm_stderr": 0.03087453753755362 }, "harness|hendrycksTest-human_aging|5": { "acc": 
0.515695067264574, "acc_stderr": 0.0335412657542081, "acc_norm": 0.515695067264574, "acc_norm_stderr": 0.0335412657542081 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009225, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009225 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5092592592592593, "acc_stderr": 0.04832853553437055, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.04832853553437055 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.48466257668711654, "acc_stderr": 0.039265223787088424, "acc_norm": 0.48466257668711654, "acc_norm_stderr": 0.039265223787088424 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|hendrycksTest-management|5": { "acc": 0.6310679611650486, "acc_stderr": 0.0477761518115674, "acc_norm": 0.6310679611650486, "acc_norm_stderr": 0.0477761518115674 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6752136752136753, "acc_stderr": 0.03067902276549883, "acc_norm": 0.6752136752136753, "acc_norm_stderr": 0.03067902276549883 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6704980842911877, "acc_stderr": 0.016808322261740467, "acc_norm": 0.6704980842911877, "acc_norm_stderr": 0.016808322261740467 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.47109826589595377, "acc_stderr": 0.02687408588351835, "acc_norm": 0.47109826589595377, "acc_norm_stderr": 0.02687408588351835 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.25027932960893856, "acc_stderr": 0.014487500852850407, "acc_norm": 0.25027932960893856, "acc_norm_stderr": 0.014487500852850407 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5522875816993464, "acc_stderr": 0.02847293847803353, "acc_norm": 0.5522875816993464, "acc_norm_stderr": 0.02847293847803353 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5755627009646302, "acc_stderr": 0.028071928247946208, "acc_norm": 0.5755627009646302, "acc_norm_stderr": 0.028071928247946208 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5308641975308642, "acc_stderr": 0.02776768960683393, "acc_norm": 0.5308641975308642, "acc_norm_stderr": 0.02776768960683393 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.028267657482650154, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.028267657482650154 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.38722294654498046, "acc_stderr": 0.012441155326854924, "acc_norm": 0.38722294654498046, "acc_norm_stderr": 0.012441155326854924 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.47794117647058826, "acc_stderr": 0.030343264224213528, "acc_norm": 0.47794117647058826, "acc_norm_stderr": 0.030343264224213528 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4395424836601307, "acc_stderr": 0.02007942040808792, "acc_norm": 0.4395424836601307, "acc_norm_stderr": 0.02007942040808792 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5454545454545454, "acc_stderr": 0.04769300568972744, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 
0.04769300568972744 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6081632653061224, "acc_stderr": 0.03125127591089165, "acc_norm": 0.6081632653061224, "acc_norm_stderr": 0.03125127591089165 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6766169154228856, "acc_stderr": 0.03307615947979034, "acc_norm": 0.6766169154228856, "acc_norm_stderr": 0.03307615947979034 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479637, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479637 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03615507630310935, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03615507630310935 }, "harness|truthfulqa:mc|0": { "mc1": 0.36107711138310894, "mc1_stderr": 0.016814312844836882, "mc2": 0.5103220670450638, "mc2_stderr": 0.015890639542177364 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
0.37415018677711487, 1.0192716121673584, 0.6329425573348999, -0.7741804718971252, 0.36463165283203125, -0.5111536383628845, -0.43024423718452454, -0.3841139078140259, -0.49225538969039917, -0.8790619969367981, -0.4516808092594147, -0.22429774701595306, -0.6085289120674133, -0.11058735847473145, 0.9824157357215881, 0.464372456073761, -0.9222866296768188, -0.4399271011352539, -0.10731631517410278, 0.1473688930273056, -0.6101318597793579, -0.4048996865749359, 0.7838894128799438, -0.10984137654304504, -0.538968026638031, 0.1914760172367096, -0.13077689707279205, 0.1876511573791504, 0.07621302455663681, -0.38687261939048767, -0.6917864084243774, 0.014697332866489887, 0.4040566086769104, 0.39531993865966797, -0.6846196055412292, -0.6911976933479309, 0.28097864985466003, -0.533797025680542, 0.423779159784317, -0.04090714827179909, -0.5243929624557495, 0.05742344632744789, 0.6961207985877991, 0.46404892206192017, 0.6631001234054565, -0.020465681329369545, 0.07012946158647537, -0.6354634761810303, 0.17235611379146576, -0.015570505522191525, 0.28611865639686584, -0.04941311851143837, -0.31257984042167664, 0.7928023338317871, 0.6471059918403625, -0.5369259119033813, -1.052367091178894, -0.4225379526615143, -1.4404699802398682, -0.004488762933760881, 1.1125977039337158, -0.03669020161032677, -0.4929974675178528, 0.24032187461853027, -0.14450034499168396, 0.22154903411865234, -0.32913830876350403, 0.7904959321022034, 0.7237784266471863, -0.3849613070487976, 0.14696048200130463, -0.644830048084259, 0.3785085678100586, 0.5431968569755554, -1.2225552797317505, -0.1242356225848198, 0.20793919265270233, 0.29065993428230286, 0.3526032567024231, 0.6151083707809448, -0.10766729712486267, 0.27810636162757874, 0.23091626167297363, 0.04925529658794403, -0.01663290150463581, 0.09265310317277908, -0.21840530633926392, 0.07578882575035095, -0.24693095684051514, -0.4836614727973938 ]
open-llm-leaderboard/details_yihan6324__llama2-13b-instructmining-40k-sharegpt
open-llm-leaderboard
2023-08-27T12:41:57Z
201
0
[ "region:us" ]
null
2023-08-18T18:57:56Z
--- pretty_name: Evaluation run of yihan6324/llama2-13b-instructmining-40k-sharegpt dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yihan6324/llama2-13b-instructmining-40k-sharegpt](https://huggingface.co/yihan6324/llama2-13b-instructmining-40k-sharegpt)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yihan6324__llama2-13b-instructmining-40k-sharegpt\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-17T15:06:33.773565](https://huggingface.co/datasets/open-llm-leaderboard/details_yihan6324__llama2-13b-instructmining-40k-sharegpt/blob/main/results_2023-08-17T15%3A06%3A33.773565.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5659221179678169,\n\ \ \"acc_stderr\": 0.03435610194042996,\n \"acc_norm\": 0.5698526105353496,\n\ \ \"acc_norm_stderr\": 0.03433517528186645,\n \"mc1\": 0.35862913096695226,\n\ \ \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5244082340441981,\n\ \ \"mc2_stderr\": 0.015623466277080963\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5656996587030717,\n \"acc_stderr\": 0.01448470304885736,\n\ \ \"acc_norm\": 0.5998293515358362,\n \"acc_norm_stderr\": 0.014317197787809169\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6328420633339972,\n\ \ \"acc_stderr\": 0.004810449343572395,\n \"acc_norm\": 0.8306114319856602,\n\ \ \"acc_norm_stderr\": 0.003743281749373634\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n\ \ \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n\ \ \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.625,\n \"acc_stderr\": 0.039397364351956274,\n \ \ \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.039397364351956274\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\ \ \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \ \ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6150943396226415,\n \"acc_stderr\": 0.02994649856769995,\n\ \ \"acc_norm\": 0.6150943396226415,\n \"acc_norm_stderr\": 0.02994649856769995\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6111111111111112,\n\ \ \"acc_stderr\": 0.04076663253918567,\n \"acc_norm\": 
0.6111111111111112,\n\ \ \"acc_norm_stderr\": 0.04076663253918567\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n\ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5895953757225434,\n\ \ \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.5895953757225434,\n\ \ \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383887,\n\ \ \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383887\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4595744680851064,\n \"acc_stderr\": 0.03257901482099835,\n\ \ \"acc_norm\": 0.4595744680851064,\n \"acc_norm_stderr\": 0.03257901482099835\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n\ \ \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.29365079365079366,\n \"acc_stderr\": 0.023456037383982026,\n \"\ acc_norm\": 0.29365079365079366,\n \"acc_norm_stderr\": 0.023456037383982026\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n\ \ \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n\ \ \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.6709677419354839,\n \"acc_stderr\": 0.026729499068349958,\n \"\ acc_norm\": 0.6709677419354839,\n \"acc_norm_stderr\": 0.026729499068349958\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n \"\ acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\"\ : 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.0364620496325381,\n\ \ \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.0364620496325381\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7171717171717171,\n \"acc_stderr\": 0.03208779558786753,\n \"\ acc_norm\": 0.7171717171717171,\n \"acc_norm_stderr\": 0.03208779558786753\n\ \ 
},\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8134715025906736,\n \"acc_stderr\": 0.02811209121011748,\n\ \ \"acc_norm\": 0.8134715025906736,\n \"acc_norm_stderr\": 0.02811209121011748\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5333333333333333,\n \"acc_stderr\": 0.025294608023986472,\n\ \ \"acc_norm\": 0.5333333333333333,\n \"acc_norm_stderr\": 0.025294608023986472\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073838,\n \ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073838\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5378151260504201,\n \"acc_stderr\": 0.032385469487589795,\n\ \ \"acc_norm\": 0.5378151260504201,\n \"acc_norm_stderr\": 0.032385469487589795\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.037579499229433426,\n \"\ acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.037579499229433426\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7669724770642202,\n \"acc_stderr\": 0.018125669180861507,\n \"\ acc_norm\": 0.7669724770642202,\n \"acc_norm_stderr\": 0.018125669180861507\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4305555555555556,\n \"acc_stderr\": 0.03376922151252336,\n \"\ acc_norm\": 0.4305555555555556,\n \"acc_norm_stderr\": 0.03376922151252336\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7647058823529411,\n \"acc_stderr\": 0.029771775228145638,\n \"\ acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.029771775228145638\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \ \ \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n\ \ \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n\ \ \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6183206106870229,\n \"acc_stderr\": 0.04260735157644559,\n\ \ \"acc_norm\": 0.6183206106870229,\n \"acc_norm_stderr\": 0.04260735157644559\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.71900826446281,\n \"acc_stderr\": 0.041032038305145124,\n \"acc_norm\"\ : 0.71900826446281,\n \"acc_norm_stderr\": 0.041032038305145124\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n\ \ \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n\ \ \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6625766871165644,\n \"acc_stderr\": 0.03714908409935574,\n\ \ \"acc_norm\": 0.6625766871165644,\n \"acc_norm_stderr\": 0.03714908409935574\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n\ \ \"acc_stderr\": 0.04464285714285713,\n \"acc_norm\": 0.33035714285714285,\n\ \ \"acc_norm_stderr\": 0.04464285714285713\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n\ \ \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n\ \ 
\"acc_stderr\": 0.02390232554956041,\n \"acc_norm\": 0.8418803418803419,\n\ \ \"acc_norm_stderr\": 0.02390232554956041\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \ \ \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488583\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7509578544061303,\n\ \ \"acc_stderr\": 0.015464676163395958,\n \"acc_norm\": 0.7509578544061303,\n\ \ \"acc_norm_stderr\": 0.015464676163395958\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.630057803468208,\n \"acc_stderr\": 0.025992472029306386,\n\ \ \"acc_norm\": 0.630057803468208,\n \"acc_norm_stderr\": 0.025992472029306386\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4122905027932961,\n\ \ \"acc_stderr\": 0.01646320023811453,\n \"acc_norm\": 0.4122905027932961,\n\ \ \"acc_norm_stderr\": 0.01646320023811453\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6013071895424836,\n \"acc_stderr\": 0.028036092273891776,\n\ \ \"acc_norm\": 0.6013071895424836,\n \"acc_norm_stderr\": 0.028036092273891776\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6205787781350482,\n\ \ \"acc_stderr\": 0.027559949802347817,\n \"acc_norm\": 0.6205787781350482,\n\ \ \"acc_norm_stderr\": 0.027559949802347817\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6265432098765432,\n \"acc_stderr\": 0.026915003011380154,\n\ \ \"acc_norm\": 0.6265432098765432,\n \"acc_norm_stderr\": 0.026915003011380154\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4219858156028369,\n \"acc_stderr\": 0.029462189233370593,\n \ \ \"acc_norm\": 0.4219858156028369,\n \"acc_norm_stderr\": 0.029462189233370593\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4256844850065189,\n\ \ \"acc_stderr\": 0.012628393551811943,\n \"acc_norm\": 0.4256844850065189,\n\ \ \"acc_norm_stderr\": 0.012628393551811943\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5735294117647058,\n \"acc_stderr\": 0.03004261583271486,\n\ \ \"acc_norm\": 0.5735294117647058,\n \"acc_norm_stderr\": 0.03004261583271486\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5784313725490197,\n \"acc_stderr\": 0.01997742260022747,\n \ \ \"acc_norm\": 0.5784313725490197,\n \"acc_norm_stderr\": 0.01997742260022747\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n\ \ \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n\ \ \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6285714285714286,\n \"acc_stderr\": 0.03093285879278985,\n\ \ \"acc_norm\": 0.6285714285714286,\n \"acc_norm_stderr\": 0.03093285879278985\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7761194029850746,\n\ \ \"acc_stderr\": 0.029475250236017193,\n \"acc_norm\": 0.7761194029850746,\n\ \ \"acc_norm_stderr\": 0.029475250236017193\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n\ \ \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.4939759036144578,\n\ \ \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 
0.032180937956023566,\n\ \ \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35862913096695226,\n\ \ \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5244082340441981,\n\ \ \"mc2_stderr\": 0.015623466277080963\n }\n}\n```" repo_url: https://huggingface.co/yihan6324/llama2-13b-instructmining-40k-sharegpt leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|arc:challenge|25_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hellaswag|10_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:06:33.773565.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:06:33.773565.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:06:33.773565.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T15:06:33.773565.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:06:33.773565.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:06:33.773565.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_17T15_06_33.773565 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T15:06:33.773565.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T15:06:33.773565.parquet' - config_name: results data_files: - split: 2023_08_17T15_06_33.773565 path: - results_2023-08-17T15:06:33.773565.parquet - split: latest path: - results_2023-08-17T15:06:33.773565.parquet --- # Dataset Card for Evaluation run of yihan6324/llama2-13b-instructmining-40k-sharegpt ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yihan6324/llama2-13b-instructmining-40k-sharegpt - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [yihan6324/llama2-13b-instructmining-40k-sharegpt](https://huggingface.co/yihan6324/llama2-13b-instructmining-40k-sharegpt) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
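For instance, a minimal sketch of working with this layout (assuming the `datasets` library is installed and that the config and split names are exactly those declared in the YAML above, such as the "results" config and the "latest" split):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_yihan6324__llama2-13b-instructmining-40k-sharegpt"

# List the available configurations (one per evaluated task, plus the
# aggregated "results" config).
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# Load the aggregated metrics; per the config listing above, the "latest"
# split points to the most recent evaluation run.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```

The same pattern works for any per-task configuration, as shown with `harness_truthfulqa_mc_0` below.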
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yihan6324__llama2-13b-instructmining-40k-sharegpt", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T15:06:33.773565](https://huggingface.co/datasets/open-llm-leaderboard/details_yihan6324__llama2-13b-instructmining-40k-sharegpt/blob/main/results_2023-08-17T15%3A06%3A33.773565.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5659221179678169, "acc_stderr": 0.03435610194042996, "acc_norm": 0.5698526105353496, "acc_norm_stderr": 0.03433517528186645, "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5244082340441981, "mc2_stderr": 0.015623466277080963 }, "harness|arc:challenge|25": { "acc": 0.5656996587030717, "acc_stderr": 0.01448470304885736, "acc_norm": 0.5998293515358362, "acc_norm_stderr": 0.014317197787809169 }, "harness|hellaswag|10": { "acc": 0.6328420633339972, "acc_stderr": 0.004810449343572395, "acc_norm": 0.8306114319856602, "acc_norm_stderr": 0.003743281749373634 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.625, "acc_stderr": 0.039397364351956274, "acc_norm": 0.625, "acc_norm_stderr": 0.039397364351956274 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6150943396226415, "acc_stderr": 0.02994649856769995, "acc_norm": 0.6150943396226415, "acc_norm_stderr": 0.02994649856769995 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6111111111111112, "acc_stderr": 0.04076663253918567, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.04076663253918567 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5895953757225434, "acc_stderr": 0.03750757044895537, "acc_norm": 0.5895953757225434, "acc_norm_stderr": 0.03750757044895537 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383887, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383887 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4595744680851064, "acc_stderr": 0.03257901482099835, "acc_norm": 0.4595744680851064, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, 
"acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.29365079365079366, "acc_stderr": 0.023456037383982026, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.023456037383982026 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6709677419354839, "acc_stderr": 0.026729499068349958, "acc_norm": 0.6709677419354839, "acc_norm_stderr": 0.026729499068349958 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.03502544650845872, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.0364620496325381, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.0364620496325381 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7171717171717171, "acc_stderr": 0.03208779558786753, "acc_norm": 0.7171717171717171, "acc_norm_stderr": 0.03208779558786753 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8134715025906736, "acc_stderr": 0.02811209121011748, "acc_norm": 0.8134715025906736, "acc_norm_stderr": 0.02811209121011748 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5333333333333333, "acc_stderr": 0.025294608023986472, "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.025294608023986472 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073838, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073838 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5378151260504201, "acc_stderr": 0.032385469487589795, "acc_norm": 0.5378151260504201, "acc_norm_stderr": 0.032385469487589795 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.037579499229433426, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.037579499229433426 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7669724770642202, "acc_stderr": 0.018125669180861507, "acc_norm": 0.7669724770642202, "acc_norm_stderr": 0.018125669180861507 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4305555555555556, "acc_stderr": 0.03376922151252336, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.03376922151252336 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.029771775228145638, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.029771775228145638 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 
0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6183206106870229, "acc_stderr": 0.04260735157644559, "acc_norm": 0.6183206106870229, "acc_norm_stderr": 0.04260735157644559 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.041032038305145124, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.041032038305145124 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6625766871165644, "acc_stderr": 0.03714908409935574, "acc_norm": 0.6625766871165644, "acc_norm_stderr": 0.03714908409935574 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285713, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285713 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.02390232554956041, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.02390232554956041 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7509578544061303, "acc_stderr": 0.015464676163395958, "acc_norm": 0.7509578544061303, "acc_norm_stderr": 0.015464676163395958 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.630057803468208, "acc_stderr": 0.025992472029306386, "acc_norm": 0.630057803468208, "acc_norm_stderr": 0.025992472029306386 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4122905027932961, "acc_stderr": 0.01646320023811453, "acc_norm": 0.4122905027932961, "acc_norm_stderr": 0.01646320023811453 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6013071895424836, "acc_stderr": 0.028036092273891776, "acc_norm": 0.6013071895424836, "acc_norm_stderr": 0.028036092273891776 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6205787781350482, "acc_stderr": 0.027559949802347817, "acc_norm": 0.6205787781350482, "acc_norm_stderr": 0.027559949802347817 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6265432098765432, "acc_stderr": 0.026915003011380154, "acc_norm": 0.6265432098765432, "acc_norm_stderr": 0.026915003011380154 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4219858156028369, "acc_stderr": 0.029462189233370593, "acc_norm": 0.4219858156028369, "acc_norm_stderr": 0.029462189233370593 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4256844850065189, "acc_stderr": 0.012628393551811943, "acc_norm": 0.4256844850065189, "acc_norm_stderr": 0.012628393551811943 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5735294117647058, "acc_stderr": 0.03004261583271486, "acc_norm": 0.5735294117647058, "acc_norm_stderr": 0.03004261583271486 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5784313725490197, "acc_stderr": 0.01997742260022747, "acc_norm": 0.5784313725490197, "acc_norm_stderr": 0.01997742260022747 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.046534298079135075 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.6285714285714286, "acc_stderr": 0.03093285879278985, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.03093285879278985 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7761194029850746, "acc_stderr": 0.029475250236017193, "acc_norm": 0.7761194029850746, "acc_norm_stderr": 0.029475250236017193 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333045, "acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5244082340441981, "mc2_stderr": 0.015623466277080963 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v3
open-llm-leaderboard
2023-08-27T12:43:04Z
201
0
[ "region:us" ]
null
2023-08-26T00:05:22Z
--- pretty_name: Evaluation run of yeontaek/llama-2-13B-ensemble-v3 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yeontaek/llama-2-13B-ensemble-v3](https://huggingface.co/yeontaek/llama-2-13B-ensemble-v3)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v3\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-26T00:04:59.687493](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v3/blob/main/results_2023-08-26T00%3A04%3A59.687493.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5779190308106052,\n\ \ \"acc_stderr\": 0.03411621449047892,\n \"acc_norm\": 0.5817150329939268,\n\ \ \"acc_norm_stderr\": 0.03409598894931763,\n \"mc1\": 0.35495716034271724,\n\ \ \"mc1_stderr\": 0.0167508623813759,\n \"mc2\": 0.49782296764839973,\n\ \ \"mc2_stderr\": 0.015206569782538341\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5981228668941979,\n \"acc_stderr\": 0.014327268614578274,\n\ \ \"acc_norm\": 0.6237201365187713,\n \"acc_norm_stderr\": 0.014157022555407161\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6245767775343557,\n\ \ \"acc_stderr\": 0.004832423630593182,\n \"acc_norm\": 0.8229436367257519,\n\ \ \"acc_norm_stderr\": 0.0038093627612481094\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45925925925925926,\n\ \ \"acc_stderr\": 0.04304979692464243,\n \"acc_norm\": 0.45925925925925926,\n\ \ \"acc_norm_stderr\": 0.04304979692464243\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5263157894736842,\n \"acc_stderr\": 0.04063302731486671,\n\ \ \"acc_norm\": 0.5263157894736842,\n \"acc_norm_stderr\": 0.04063302731486671\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\ \ \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \ \ \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6113207547169811,\n \"acc_stderr\": 0.030000485448675986,\n\ \ \"acc_norm\": 0.6113207547169811,\n \"acc_norm_stderr\": 0.030000485448675986\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6180555555555556,\n\ \ \"acc_stderr\": 0.040629907841466674,\n \"acc_norm\": 0.6180555555555556,\n\ \ \"acc_norm_stderr\": 
0.040629907841466674\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5317919075144508,\n\ \ \"acc_stderr\": 0.038047497443647646,\n \"acc_norm\": 0.5317919075144508,\n\ \ \"acc_norm_stderr\": 0.038047497443647646\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.046550104113196177,\n\ \ \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.046550104113196177\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n\ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.46382978723404256,\n \"acc_stderr\": 0.032600385118357715,\n\ \ \"acc_norm\": 0.46382978723404256,\n \"acc_norm_stderr\": 0.032600385118357715\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n\ \ \"acc_stderr\": 0.04303684033537315,\n \"acc_norm\": 0.2982456140350877,\n\ \ \"acc_norm_stderr\": 0.04303684033537315\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192118,\n\ \ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192118\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3386243386243386,\n \"acc_stderr\": 0.02437319786798307,\n \"\ acc_norm\": 0.3386243386243386,\n \"acc_norm_stderr\": 0.02437319786798307\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n\ \ \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n\ \ \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.667741935483871,\n\ \ \"acc_stderr\": 0.0267955608481228,\n \"acc_norm\": 0.667741935483871,\n\ \ \"acc_norm_stderr\": 0.0267955608481228\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.43349753694581283,\n \"acc_stderr\": 0.03486731727419872,\n\ \ \"acc_norm\": 0.43349753694581283,\n \"acc_norm_stderr\": 0.03486731727419872\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\"\ : 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.03567969772268049,\n\ \ \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.03567969772268049\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7474747474747475,\n \"acc_stderr\": 0.030954055470365897,\n \"\ acc_norm\": 0.7474747474747475,\n \"acc_norm_stderr\": 0.030954055470365897\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.02749350424454806,\n\ \ \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.02749350424454806\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5717948717948718,\n \"acc_stderr\": 0.025088301454694838,\n\ \ \"acc_norm\": 0.5717948717948718,\n \"acc_norm_stderr\": 0.025088301454694838\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.28888888888888886,\n \"acc_stderr\": 0.02763490726417854,\n \ \ \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.02763490726417854\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5966386554621849,\n \"acc_stderr\": 0.031866081214088314,\n\ \ \"acc_norm\": 0.5966386554621849,\n \"acc_norm_stderr\": 0.031866081214088314\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"\ acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7651376146788991,\n \"acc_stderr\": 0.018175110510343567,\n \"\ acc_norm\": 0.7651376146788991,\n \"acc_norm_stderr\": 0.018175110510343567\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4444444444444444,\n \"acc_stderr\": 0.03388857118502326,\n \"\ acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.03388857118502326\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7549019607843137,\n \"acc_stderr\": 0.030190282453501947,\n \"\ acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.030190282453501947\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7552742616033755,\n \"acc_stderr\": 0.027985699387036423,\n \ \ \"acc_norm\": 0.7552742616033755,\n \"acc_norm_stderr\": 0.027985699387036423\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n\ \ \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n\ \ \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6793893129770993,\n \"acc_stderr\": 0.04093329229834278,\n\ \ \"acc_norm\": 0.6793893129770993,\n \"acc_norm_stderr\": 0.04093329229834278\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\"\ : 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \ \ \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \ \ \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7055214723926381,\n \"acc_stderr\": 0.03581165790474082,\n\ \ \"acc_norm\": 0.7055214723926381,\n \"acc_norm_stderr\": 0.03581165790474082\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n\ \ \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n\ \ \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n\ \ \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8290598290598291,\n\ \ \"acc_stderr\": 
0.02466249684520982,\n \"acc_norm\": 0.8290598290598291,\n\ \ \"acc_norm_stderr\": 0.02466249684520982\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \ \ \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7739463601532567,\n\ \ \"acc_stderr\": 0.014957458504335837,\n \"acc_norm\": 0.7739463601532567,\n\ \ \"acc_norm_stderr\": 0.014957458504335837\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6445086705202312,\n \"acc_stderr\": 0.025770292082977257,\n\ \ \"acc_norm\": 0.6445086705202312,\n \"acc_norm_stderr\": 0.025770292082977257\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4324022346368715,\n\ \ \"acc_stderr\": 0.01656897123354861,\n \"acc_norm\": 0.4324022346368715,\n\ \ \"acc_norm_stderr\": 0.01656897123354861\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6372549019607843,\n \"acc_stderr\": 0.027530078447110307,\n\ \ \"acc_norm\": 0.6372549019607843,\n \"acc_norm_stderr\": 0.027530078447110307\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6495176848874598,\n\ \ \"acc_stderr\": 0.027098652621301754,\n \"acc_norm\": 0.6495176848874598,\n\ \ \"acc_norm_stderr\": 0.027098652621301754\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6327160493827161,\n \"acc_stderr\": 0.026822801759507894,\n\ \ \"acc_norm\": 0.6327160493827161,\n \"acc_norm_stderr\": 0.026822801759507894\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.42907801418439717,\n \"acc_stderr\": 0.02952591430255856,\n \ \ \"acc_norm\": 0.42907801418439717,\n \"acc_norm_stderr\": 0.02952591430255856\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41395045632333766,\n\ \ \"acc_stderr\": 0.012579699631289262,\n \"acc_norm\": 0.41395045632333766,\n\ \ \"acc_norm_stderr\": 0.012579699631289262\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.03032024326500413,\n\ \ \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.03032024326500413\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5833333333333334,\n \"acc_stderr\": 0.01994491413687358,\n \ \ \"acc_norm\": 0.5833333333333334,\n \"acc_norm_stderr\": 0.01994491413687358\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n\ \ \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n\ \ \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6448979591836734,\n \"acc_stderr\": 0.030635655150387638,\n\ \ \"acc_norm\": 0.6448979591836734,\n \"acc_norm_stderr\": 0.030635655150387638\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7263681592039801,\n\ \ \"acc_stderr\": 0.03152439186555401,\n \"acc_norm\": 0.7263681592039801,\n\ \ \"acc_norm_stderr\": 0.03152439186555401\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \ \ \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n\ \ \"acc_stderr\": 0.03882310850890593,\n \"acc_norm\": 0.463855421686747,\n\ \ \"acc_norm_stderr\": 0.03882310850890593\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 
0.03188578017686398,\n\ \ \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35495716034271724,\n\ \ \"mc1_stderr\": 0.0167508623813759,\n \"mc2\": 0.49782296764839973,\n\ \ \"mc2_stderr\": 0.015206569782538341\n }\n}\n```" repo_url: https://huggingface.co/yeontaek/llama-2-13B-ensemble-v3 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|arc:challenge|25_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hellaswag|10_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:04:59.687493.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:04:59.687493.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:04:59.687493.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:04:59.687493.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:04:59.687493.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:04:59.687493.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_26T00_04_59.687493 path: - '**/details_harness|truthfulqa:mc|0_2023-08-26T00:04:59.687493.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-26T00:04:59.687493.parquet' - config_name: results data_files: - split: 2023_08_26T00_04_59.687493 path: - results_2023-08-26T00:04:59.687493.parquet - split: latest path: - results_2023-08-26T00:04:59.687493.parquet --- # Dataset Card for Evaluation run of yeontaek/llama-2-13B-ensemble-v3 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yeontaek/llama-2-13B-ensemble-v3 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [yeontaek/llama-2-13B-ensemble-v3](https://huggingface.co/yeontaek/llama-2-13B-ensemble-v3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
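The per-task configurations and the aggregated "results" configuration described above can be queried directly with the `datasets` library. The following is a minimal sketch, not part of the original card: the repository id and the "results"/"latest" names are taken from this card, while the printed output and variable names are purely illustrative.

```python
from datasets import get_dataset_config_names, load_dataset

REPO_ID = "open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v3"

# List every configuration declared in the YAML above
# (the 61 per-task configurations plus the aggregated "results" one).
configs = get_dataset_config_names(REPO_ID)
print(len(configs), "configurations, e.g.", configs[:3])

# Load the aggregated metrics: the "results" configuration, "latest" split.
aggregated = load_dataset(REPO_ID, "results", split="latest")
print(aggregated)
```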
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v3", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-26T00:04:59.687493](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v3/blob/main/results_2023-08-26T00%3A04%3A59.687493.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5779190308106052, "acc_stderr": 0.03411621449047892, "acc_norm": 0.5817150329939268, "acc_norm_stderr": 0.03409598894931763, "mc1": 0.35495716034271724, "mc1_stderr": 0.0167508623813759, "mc2": 0.49782296764839973, "mc2_stderr": 0.015206569782538341 }, "harness|arc:challenge|25": { "acc": 0.5981228668941979, "acc_stderr": 0.014327268614578274, "acc_norm": 0.6237201365187713, "acc_norm_stderr": 0.014157022555407161 }, "harness|hellaswag|10": { "acc": 0.6245767775343557, "acc_stderr": 0.004832423630593182, "acc_norm": 0.8229436367257519, "acc_norm_stderr": 0.0038093627612481094 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45925925925925926, "acc_stderr": 0.04304979692464243, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464243 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5263157894736842, "acc_stderr": 0.04063302731486671, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.04063302731486671 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6113207547169811, "acc_stderr": 0.030000485448675986, "acc_norm": 0.6113207547169811, "acc_norm_stderr": 0.030000485448675986 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6180555555555556, "acc_stderr": 0.040629907841466674, "acc_norm": 0.6180555555555556, "acc_norm_stderr": 0.040629907841466674 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5317919075144508, "acc_stderr": 0.038047497443647646, "acc_norm": 0.5317919075144508, "acc_norm_stderr": 0.038047497443647646 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196177, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.46382978723404256, "acc_stderr": 0.032600385118357715, "acc_norm": 0.46382978723404256, "acc_norm_stderr": 0.032600385118357715 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877,
"acc_stderr": 0.04303684033537315, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537315 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192118, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192118 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3386243386243386, "acc_stderr": 0.02437319786798307, "acc_norm": 0.3386243386243386, "acc_norm_stderr": 0.02437319786798307 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.667741935483871, "acc_stderr": 0.0267955608481228, "acc_norm": 0.667741935483871, "acc_norm_stderr": 0.0267955608481228 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43349753694581283, "acc_stderr": 0.03486731727419872, "acc_norm": 0.43349753694581283, "acc_norm_stderr": 0.03486731727419872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.703030303030303, "acc_stderr": 0.03567969772268049, "acc_norm": 0.703030303030303, "acc_norm_stderr": 0.03567969772268049 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7474747474747475, "acc_stderr": 0.030954055470365897, "acc_norm": 0.7474747474747475, "acc_norm_stderr": 0.030954055470365897 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.02749350424454806, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.02749350424454806 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5717948717948718, "acc_stderr": 0.025088301454694838, "acc_norm": 0.5717948717948718, "acc_norm_stderr": 0.025088301454694838 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.02763490726417854, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.02763490726417854 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5966386554621849, "acc_stderr": 0.031866081214088314, "acc_norm": 0.5966386554621849, "acc_norm_stderr": 0.031866081214088314 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7651376146788991, "acc_stderr": 0.018175110510343567, "acc_norm": 0.7651376146788991, "acc_norm_stderr": 0.018175110510343567 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03388857118502326, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03388857118502326 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7549019607843137, "acc_stderr": 0.030190282453501947, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.030190282453501947 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.027985699387036423, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.027985699387036423 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 
0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6793893129770993, "acc_stderr": 0.04093329229834278, "acc_norm": 0.6793893129770993, "acc_norm_stderr": 0.04093329229834278 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7055214723926381, "acc_stderr": 0.03581165790474082, "acc_norm": 0.7055214723926381, "acc_norm_stderr": 0.03581165790474082 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8290598290598291, "acc_stderr": 0.02466249684520982, "acc_norm": 0.8290598290598291, "acc_norm_stderr": 0.02466249684520982 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7739463601532567, "acc_stderr": 0.014957458504335837, "acc_norm": 0.7739463601532567, "acc_norm_stderr": 0.014957458504335837 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6445086705202312, "acc_stderr": 0.025770292082977257, "acc_norm": 0.6445086705202312, "acc_norm_stderr": 0.025770292082977257 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4324022346368715, "acc_stderr": 0.01656897123354861, "acc_norm": 0.4324022346368715, "acc_norm_stderr": 0.01656897123354861 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6372549019607843, "acc_stderr": 0.027530078447110307, "acc_norm": 0.6372549019607843, "acc_norm_stderr": 0.027530078447110307 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6495176848874598, "acc_stderr": 0.027098652621301754, "acc_norm": 0.6495176848874598, "acc_norm_stderr": 0.027098652621301754 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.026822801759507894, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.026822801759507894 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.42907801418439717, "acc_stderr": 0.02952591430255856, "acc_norm": 0.42907801418439717, "acc_norm_stderr": 0.02952591430255856 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41395045632333766, "acc_stderr": 0.012579699631289262, "acc_norm": 0.41395045632333766, "acc_norm_stderr": 0.012579699631289262 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03032024326500413, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03032024326500413 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5833333333333334, "acc_stderr": 0.01994491413687358, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.01994491413687358 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.6448979591836734, "acc_stderr": 0.030635655150387638, "acc_norm": 0.6448979591836734, "acc_norm_stderr": 0.030635655150387638 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7263681592039801, "acc_stderr": 0.03152439186555401, "acc_norm": 0.7263681592039801, "acc_norm_stderr": 0.03152439186555401 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890593, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890593 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.35495716034271724, "mc1_stderr": 0.0167508623813759, "mc2": 0.49782296764839973, "mc2_stderr": 0.015206569782538341 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.72100430727005, -0.8006736636161804, 0.30182790756225586, 0.20299722254276276, -0.19439998269081116, -0.046826694160699844, 0.03149000182747841, -0.27883321046829224, 0.5777688026428223, -0.05967181921005249, -0.5285178422927856, -0.6921122074127197, -0.4408344328403473, 0.23142240941524506, -0.057017434388399124, 0.8187710642814636, -0.1586727648973465, -0.12552621960639954, 0.10067705065011978, -0.035208072513341904, -0.24691323935985565, -0.33084920048713684, -0.4372893273830414, -0.3570200502872467, 0.1473066359758377, 0.4450070261955261, 0.3792183995246887, 0.7941033840179443, 0.6655253171920776, 0.31624308228492737, -0.32379546761512756, -0.030519502237439156, -0.17529802024364471, -0.28236570954322815, 0.41706880927085876, -0.36954519152641296, -0.7938819527626038, 0.30896997451782227, 0.7741475701332092, 0.6366713047027588, -0.07533001899719238, 0.32487887144088745, 0.03766017407178879, 0.5481482744216919, -0.34581291675567627, 0.04091373085975647, -0.2781999707221985, 0.19418452680110931, -0.22982555627822876, -0.2587941884994507, -0.289363294839859, -0.23262642323970795, -0.14126934111118317, -0.8859912157058716, 0.2690865397453308, 0.2870604991912842, 1.5465402603149414, -0.11422085762023926, -0.22953186929225922, 0.14972668886184692, -0.1137363538146019, 1.076967477798462, -0.8846998810768127, 0.34681180119514465, 0.7933754324913025, 0.12159000337123871, -0.17480355501174927, -0.6060516238212585, -0.6500154733657837, 0.09314773231744766, -0.36626577377319336, 0.3679482340812683, -0.043842922896146774, -0.19787625968456268, 0.3452942669391632, 0.673591136932373, -0.6754411458969116, 0.19544312357902527, -0.6841506958007812, -0.20995499193668365, 1.082789421081543, 0.3128296434879303, 0.06346913427114487, -0.374442458152771, -0.6954805254936218, -0.6881436705589294, -0.3719854950904846, 0.24523060023784637, 0.4791718125343323, 0.3241371214389801, -0.3865295350551605, 0.7074543833732605, -0.41667184233665466, 0.545875072479248, 0.41618484258651733, -0.037143390625715256, 0.9175393581390381, -0.6601108312606812, -0.5213935971260071, -0.021082894876599312, 1.0893453359603882, 0.585273027420044, 0.003947039134800434, 0.22974041104316711, -0.0005411368329077959, -0.04746273159980774, 0.025458864867687225, -0.8545601963996887, -0.2739681601524353, 0.18171894550323486, -0.4034883975982666, -0.506023645401001, 0.3061698377132416, -0.9058480858802795, 0.15939165651798248, -0.04535607993602753, 0.43364518880844116, -0.4877709746360779, -0.10783787816762924, 0.22269141674041748, -0.42743563652038574, 0.8273119926452637, -0.18548454344272614, -0.7605868577957153, 0.3982599079608917, 0.5114655494689941, 0.7690239548683167, -0.0842830240726471, -0.4290618598461151, -0.10652358829975128, -0.12437120825052261, -0.2964233458042145, 0.5543236136436462, -0.277502179145813, -0.448030561208725, -0.29201701283454895, 0.2835969924926758, -0.24184903502464294, -0.3278476893901825, 0.7415384650230408, -0.25500524044036865, 0.2443956881761551, -0.40654003620147705, -0.6720432639122009, 0.11087941378355026, 0.36256611347198486, -0.4293459951877594, 1.3169481754302979, 0.20931734144687653, -0.8103445172309875, 0.399836927652359, -0.5725948810577393, -0.17107310891151428, -0.018859408795833588, -0.033039968460798264, -0.8031654953956604, -0.26281049847602844, 0.17182931303977966, 0.41920310258865356, -0.13655753433704376, -0.13990989327430725, -0.4151960015296936, -0.35812756419181824, 0.31475740671157837, -0.1701081097126007, 1.2264055013656616, -0.04572200030088425, -0.7533595561981201, 
-0.1162387803196907, -1.2783970832824707, 0.3241468667984009, 0.2037440687417984, -0.3591099679470062, -0.1747322827577591, -0.4765196144580841, -0.0037715763319283724, 0.1474417895078659, 0.30159783363342285, -0.7853370308876038, 0.29021090269088745, -0.345425009727478, 0.19234494864940643, 1.2924703359603882, 0.04309826344251633, 0.15951576828956604, -0.5595048666000366, 0.4850265085697174, 0.17289333045482635, 0.1299116164445877, 0.4012342691421509, -0.6275112628936768, -0.7610757350921631, -0.49683940410614014, -0.06771069020032883, 0.6140305399894714, -0.2001718133687973, 1.1443277597427368, 0.0971858873963356, -0.9343408942222595, -0.4320211708545685, -0.12042684853076935, 0.46891582012176514, 0.8366608023643494, 0.6306067705154419, -0.04917069897055626, -0.6226243376731873, -1.1251732110977173, -0.23963521420955658, -0.24564319849014282, 0.14315447211265564, 0.18414811789989471, 1.0320968627929688, -0.25612756609916687, 0.570531964302063, -1.0255106687545776, -0.20537355542182922, 0.1520589143037796, -0.03533628582954407, 0.7704281806945801, 0.7391624450683594, 0.6043413281440735, -0.6439520716667175, -0.4558311998844147, 0.15811671316623688, -0.8612734079360962, -0.11368909478187561, 0.14818653464317322, -0.3214717507362366, 0.13731321692466736, 0.12637384235858917, -0.6607524752616882, 0.5148190259933472, 0.24250611662864685, -1.0314627885818481, 1.0670266151428223, -0.3251177966594696, 0.5693259239196777, -1.0253137350082397, 0.18662616610527039, -0.019898058846592903, 0.047008734196424484, -0.508842408657074, 0.020293330773711205, 0.08275295048952103, 0.47299137711524963, -0.46989813446998596, 0.775208055973053, -0.6494969725608826, -0.07888083159923553, 0.46252092719078064, 0.1253916323184967, -0.12575629353523254, 0.3732362389564514, -0.177321657538414, 0.8125824928283691, 0.7578675746917725, -0.4883670508861542, 0.5101898908615112, 0.4295300543308258, -0.2613770663738251, 0.7498939037322998, -0.47833001613616943, -0.2723209261894226, 0.292469322681427, -0.029051730409264565, -0.8657521605491638, -0.5400651097297668, 0.03736488148570061, -0.5641207695007324, -0.0843670442700386, 0.39268964529037476, -0.2530650496482849, -0.8275202512741089, -0.9718316197395325, 0.3594982326030731, 0.7276667952537537, -0.4576297104358673, -0.1870763897895813, 0.05858708545565605, 0.10492058843374252, -0.8276427984237671, -0.8601840734481812, -0.50898277759552, -0.2762848436832428, -0.7364621758460999, 0.36324962973594666, -0.29638659954071045, -0.29018789529800415, -0.0897340402007103, -0.2624703645706177, -0.34521418809890747, 0.04524099826812744, 0.10538015514612198, 0.6996061205863953, -0.4389602243900299, -0.2626754641532898, -0.25417083501815796, -0.1666451394557953, 0.2029106765985489, -0.14507976174354553, 0.37149277329444885, -0.4809498190879822, -0.401642769575119, -0.47377070784568787, -0.03674376755952835, 0.689672589302063, -0.09164882451295853, 0.7227508425712585, 0.44500985741615295, -0.3173328936100006, 0.0126340975984931, -0.31338122487068176, -0.2915826737880707, -0.5820692181587219, 0.2922063171863556, -0.4618372619152069, -1.0444399118423462, 0.7844690084457397, 0.5227899551391602, 0.07025504112243652, 1.1829416751861572, 0.5959133505821228, -0.31823840737342834, 0.9791794419288635, 0.04075949639081955, 0.2785418629646301, 0.38000229001045227, -0.6906829476356506, 0.09749418497085571, -0.9151464104652405, -0.3147270679473877, -0.6216360330581665, -0.5424182415008545, -0.7316937446594238, -0.06519365310668945, 0.2908909022808075, 0.16980573534965515, -0.6723913550376892, 
0.6068484783172607, -0.8211341500282288, 0.5734981298446655, 0.567440927028656, 0.24461694061756134, 0.15185469388961792, -0.168412446975708, -0.3512088656425476, -0.10471111536026001, -0.429949551820755, -0.28495776653289795, 1.2350564002990723, 0.25033053755760193, 0.6871750950813293, 0.12570779025554657, 0.9531422257423401, 0.09553174674510956, -0.10902246832847595, -0.5622552037239075, 0.6437735557556152, 0.14514873921871185, -0.7991045117378235, -0.43920132517814636, -0.50276118516922, -1.0715998411178589, 0.3760957419872284, -0.09310127049684525, -0.9285334944725037, 0.1406278759241104, 0.04767526313662529, -0.20394816994667053, 0.48910415172576904, -0.5704661011695862, 0.8384862542152405, -0.12927278876304626, -0.487408846616745, 0.06967736780643463, -0.8503082394599915, 0.49594977498054504, 0.1934814751148224, 0.2521457076072693, 0.06894179433584213, 0.2450757771730423, 1.2068917751312256, -0.8280290365219116, 0.4604998528957367, 0.09199020266532898, 0.01861606352031231, 0.39709287881851196, -0.19626742601394653, 0.5243682861328125, 0.10341295599937439, -0.044695280492305756, -0.10195114463567734, 0.28608179092407227, -0.8673744201660156, -0.011834888719022274, 0.9400262832641602, -0.9955350756645203, -0.6340544819831848, -0.8779255747795105, -0.47350001335144043, 0.06118098646402359, 0.5294327139854431, 0.39676904678344727, 0.5171442031860352, -0.005217444617301226, 0.45166873931884766, 0.8402565121650696, -0.10231859982013702, 0.6399706602096558, 0.19909675419330597, 0.13085554540157318, -0.6260280013084412, 0.8773353695869446, 0.07387412339448929, 0.36514708399772644, 0.2592845559120178, 0.39328959584236145, -0.5581234693527222, -0.18817509710788727, -0.24341149628162384, 0.4779880940914154, -0.6202864050865173, -0.28316226601600647, -0.34370213747024536, -0.36490508913993835, -0.7523350715637207, -0.5966054797172546, -0.30362093448638916, -0.5540745258331299, -0.5344692468643188, -0.5179396271705627, 0.5979815125465393, 0.4795847535133362, -0.40636706352233887, 0.067811980843544, -0.4834608733654022, 0.27164238691329956, 0.35982581973075867, 0.5495112538337708, -0.3864941895008087, -0.5958559513092041, 0.09416168928146362, -0.16300466656684875, -0.5849894285202026, -0.9848272204399109, 0.31854328513145447, -0.0252696480602026, 0.5259096622467041, 0.6041770577430725, 0.036260660737752914, 0.8457795977592468, -0.19447092711925507, 1.1061125993728638, 0.33003154397010803, -0.7997009754180908, 0.716652512550354, -0.2855447232723236, 0.14122001826763153, 0.6339746117591858, 0.146539568901062, -0.1973722279071808, -0.6344817280769348, -1.324930191040039, -0.8040639162063599, 0.7076187133789062, 0.4247407615184784, -0.26789936423301697, 0.040092531591653824, 0.1216118261218071, -0.27580323815345764, -0.17493818700313568, -0.6715739965438843, -0.8691982626914978, -0.2047317773103714, -0.4973336458206177, 0.05392513796687126, 0.03496510908007622, -0.37410497665405273, -0.7627996802330017, 0.9742286205291748, -0.005625009071081877, 0.5992569327354431, 0.46319305896759033, 0.054109688848257065, 0.07147003710269928, 0.500344455242157, 0.9724566340446472, 0.6799992322921753, -0.4505215883255005, 0.4291088581085205, 0.41756540536880493, -1.043564796447754, 0.47187376022338867, 0.3425043523311615, -0.033596113324165344, -0.05468180403113365, 0.5261000394821167, 0.42420127987861633, 0.008164430968463421, -0.20441097021102905, 0.5890527963638306, 0.012063546106219292, -0.571280837059021, -0.36753368377685547, 0.10935865342617035, -0.10586875677108765, 0.01582483947277069, 0.40647509694099426, 
-0.1408935934305191, -0.02782471291720867, -0.49302923679351807, 0.4510985016822815, 0.34936192631721497, -0.4548220634460449, -0.18983754515647888, 0.717588484287262, -0.19202636182308197, -0.13471858203411102, 0.30982476472854614, -0.19616390764713287, -0.6554691195487976, 1.1562013626098633, 0.5950855016708374, 0.6757070422172546, -0.2747592031955719, -0.0697266086935997, 0.9177326560020447, 0.37418434023857117, -0.10521747171878815, 0.4902009069919586, 0.3092133104801178, -0.247592493891716, 0.16831257939338684, -0.8536876440048218, 0.0034697263035923243, 0.21634835004806519, -0.8038133978843689, 0.31108328700065613, -0.5091866254806519, -0.1709526926279068, 0.014126673340797424, 0.423829585313797, -0.4722044765949249, 0.545087993144989, -0.39680227637290955, 1.196131944656372, -0.9846547245979309, 0.6730051636695862, 0.7559795379638672, -0.5280634164810181, -1.0587153434753418, -0.5136247873306274, 0.058936722576618195, -0.832766056060791, 0.5499880313873291, -0.03436213359236717, 0.17064262926578522, -0.08282347768545151, -0.7319738268852234, -0.9328458905220032, 1.4333868026733398, -0.06428885459899902, -0.4910281300544739, 0.2635849118232727, -0.07692742347717285, 0.4752015173435211, 0.1596539169549942, 0.5779938697814941, 0.7677199840545654, 0.8509181141853333, -0.07204969972372055, -0.7353704571723938, 0.2815491855144501, -0.508536696434021, -0.34084048867225647, 0.47016727924346924, -0.9417363405227661, 1.1542198657989502, 0.004171138163655996, 0.2209690511226654, -0.12230107933282852, 0.6845116019248962, 0.773659348487854, 0.3055626451969147, 0.3699852526187897, 0.9112268090248108, 0.9065629243850708, -0.49455222487449646, 1.0334291458129883, -0.21441957354545593, 0.8301268815994263, 0.6956182718276978, 0.20177996158599854, 0.7680380344390869, 0.6994161009788513, -0.5524348020553589, 0.5544439554214478, 0.8293526768684387, -0.3048739433288574, 0.4238137900829315, 0.2762782871723175, -0.1478835791349411, -0.17758260667324066, 0.4223951995372772, -0.9023919105529785, 0.1279534548521042, 0.07810281217098236, -0.3105189800262451, 0.08521560579538345, -0.4929412603378296, 0.24421463906764984, -0.06769350916147232, -0.049496911466121674, 0.3708915412425995, 0.05250656232237816, -0.43328461050987244, 0.8986397385597229, -0.11312861740589142, 0.7252115607261658, -0.5051583051681519, -0.08891315758228302, -0.36046239733695984, 0.563011109828949, -0.43285301327705383, -1.0624747276306152, 0.15560805797576904, 0.08233744651079178, -0.10920833051204681, -0.15611498057842255, 0.719212532043457, -0.17239996790885925, -0.8066298365592957, 0.11893098801374435, 0.055433712899684906, 0.1023222804069519, 0.5553959012031555, -0.6157822608947754, -0.3453042507171631, -0.04024738073348999, -0.5817533731460571, 0.09411667287349701, 0.2980802357196808, 0.22739125788211823, 0.5203416347503662, 0.6300061941146851, 0.12227826565504074, 0.39881932735443115, -0.5363953709602356, 0.7807909250259399, -1.0224719047546387, -0.7228193283081055, -0.8959002494812012, 0.47035372257232666, -0.3169013261795044, -0.8935753703117371, 0.929258406162262, 1.0165596008300781, 0.8986982107162476, 0.006499058101326227, 0.64052814245224, -0.3688254952430725, 0.2557372748851776, -0.38974931836128235, 0.8941457867622375, -0.8680253624916077, -0.179208904504776, -0.25979843735694885, -0.7225320339202881, -0.36209577322006226, 0.863598108291626, -0.19085693359375, 0.05675804987549782, 1.0406444072723389, 0.6585623621940613, -0.1205759346485138, 0.07388393580913544, -0.07223011553287506, 0.5985064506530762, 0.36660605669021606, 
0.9683796763420105, 0.6545897126197815, -0.7704163193702698, 0.3366178870201111, -0.5172271728515625, -0.3999265134334564, -0.4014955163002014, -0.48152846097946167, -0.8662139177322388, -0.502780020236969, -0.22978749871253967, -0.5953986644744873, -0.13639581203460693, 1.0098285675048828, 0.4351259469985962, -0.9192553162574768, -0.4101541042327881, -0.12023917585611343, 0.14063972234725952, -0.5888791084289551, -0.40205731987953186, 0.7398418188095093, -0.10799570381641388, -0.5368737578392029, 0.22517843544483185, -0.16299162805080414, 0.23479020595550537, 0.10592283308506012, -0.41407373547554016, -0.701619565486908, 0.015613020397722721, 0.40214407444000244, 0.3295997083187103, -0.6868345737457275, -0.7539325952529907, 0.3048112988471985, -0.5048593282699585, 0.42775750160217285, -0.07431180775165558, -0.5420334339141846, -0.0008203628822229803, 0.6950193047523499, 0.4876519441604614, 0.6929333806037903, -0.05167834833264351, 0.05175332725048065, -0.6798238158226013, 0.19234387576580048, 0.0010306939948350191, 0.2906668782234192, -0.02761845849454403, -0.2955520451068878, 0.7658196091651917, 0.6898115873336792, -0.5067116618156433, -1.063830852508545, -0.41832759976387024, -1.4478092193603516, -0.026386909186840057, 1.0853086709976196, 0.027760690078139305, -0.4970399737358093, 0.2780799865722656, -0.13633379340171814, 0.19226014614105225, -0.35541602969169617, 0.7913491129875183, 0.8155390620231628, -0.3358537554740906, 0.15747199952602386, -0.615894079208374, 0.3592534363269806, 0.5414776802062988, -1.205478549003601, -0.1455460488796234, 0.2592337131500244, 0.3041622042655945, 0.3945907950401306, 0.568413496017456, -0.06550232321023941, 0.27815428376197815, 0.20358704030513763, 0.021916722878813744, 0.052087824791669846, 0.06545792520046234, -0.23081812262535095, 0.08641129732131958, -0.2560340464115143, -0.4614238440990448 ]
open-llm-leaderboard/details_ehartford__CodeLlama-34b-Instruct-hf
open-llm-leaderboard
2023-08-27T12:43:06Z
201
0
[ "region:us" ]
null
2023-08-26T00:11:37Z
--- pretty_name: Evaluation run of ehartford/CodeLlama-34b-Instruct-hf dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [ehartford/CodeLlama-34b-Instruct-hf](https://huggingface.co/ehartford/CodeLlama-34b-Instruct-hf)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__CodeLlama-34b-Instruct-hf\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-26T00:11:17.332215](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__CodeLlama-34b-Instruct-hf/blob/main/results_2023-08-26T00%3A11%3A17.332215.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3954825543560614,\n\ \ \"acc_stderr\": 0.034996131407759465,\n \"acc_norm\": 0.39693969001192136,\n\ \ \"acc_norm_stderr\": 0.03500279971831286,\n \"mc1\": 0.29008567931456547,\n\ \ \"mc1_stderr\": 0.01588623687420952,\n \"mc2\": 0.4428923144531004,\n\ \ \"mc2_stderr\": 0.014810370517699043\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.378839590443686,\n \"acc_stderr\": 0.01417591549000032,\n\ \ \"acc_norm\": 0.40784982935153585,\n \"acc_norm_stderr\": 0.014361097288449708\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2998406691894045,\n\ \ \"acc_stderr\": 0.004572515919210699,\n \"acc_norm\": 0.35680143397729536,\n\ \ \"acc_norm_stderr\": 0.004780764443411313\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.35555555555555557,\n\ \ \"acc_stderr\": 0.04135176749720386,\n \"acc_norm\": 0.35555555555555557,\n\ \ \"acc_norm_stderr\": 0.04135176749720386\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.3684210526315789,\n \"acc_stderr\": 0.03925523381052932,\n\ \ \"acc_norm\": 0.3684210526315789,\n \"acc_norm_stderr\": 0.03925523381052932\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.44,\n\ \ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.4037735849056604,\n \"acc_stderr\": 0.030197611600197953,\n\ \ \"acc_norm\": 0.4037735849056604,\n \"acc_norm_stderr\": 0.030197611600197953\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3680555555555556,\n\ \ \"acc_stderr\": 0.04032999053960718,\n \"acc_norm\": 0.3680555555555556,\n\ \ 
\"acc_norm_stderr\": 0.04032999053960718\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\"\ : 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3583815028901734,\n\ \ \"acc_stderr\": 0.036563436533531585,\n \"acc_norm\": 0.3583815028901734,\n\ \ \"acc_norm_stderr\": 0.036563436533531585\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n\ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.3872340425531915,\n \"acc_stderr\": 0.03184389265339525,\n\ \ \"acc_norm\": 0.3872340425531915,\n \"acc_norm_stderr\": 0.03184389265339525\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n\ \ \"acc_stderr\": 0.04096985139843672,\n \"acc_norm\": 0.2543859649122807,\n\ \ \"acc_norm_stderr\": 0.04096985139843672\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.3448275862068966,\n \"acc_stderr\": 0.03960933549451208,\n\ \ \"acc_norm\": 0.3448275862068966,\n \"acc_norm_stderr\": 0.03960933549451208\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.30423280423280424,\n \"acc_stderr\": 0.02369541500946309,\n \"\ acc_norm\": 0.30423280423280424,\n \"acc_norm_stderr\": 0.02369541500946309\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n\ \ \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n\ \ \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.44193548387096776,\n \"acc_stderr\": 0.02825155790684974,\n \"\ acc_norm\": 0.44193548387096776,\n \"acc_norm_stderr\": 0.02825155790684974\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.3645320197044335,\n \"acc_stderr\": 0.033864057460620905,\n \"\ acc_norm\": 0.3645320197044335,\n \"acc_norm_stderr\": 0.033864057460620905\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\"\ : 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.28484848484848485,\n \"acc_stderr\": 0.035243908445117836,\n\ \ \"acc_norm\": 0.28484848484848485,\n \"acc_norm_stderr\": 0.035243908445117836\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.5202020202020202,\n \"acc_stderr\": 0.03559443565563918,\n \"\ acc_norm\": 0.5202020202020202,\n \"acc_norm_stderr\": 0.03559443565563918\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.533678756476684,\n \"acc_stderr\": 0.036002440698671784,\n\ \ \"acc_norm\": 0.533678756476684,\n \"acc_norm_stderr\": 0.036002440698671784\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.3564102564102564,\n \"acc_stderr\": 0.024283140529467295,\n\ \ \"acc_norm\": 0.3564102564102564,\n \"acc_norm_stderr\": 0.024283140529467295\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3111111111111111,\n \"acc_stderr\": 0.028226446749683515,\n \ \ \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.028226446749683515\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.39915966386554624,\n \"acc_stderr\": 0.031811100324139245,\n\ \ \"acc_norm\": 0.39915966386554624,\n \"acc_norm_stderr\": 0.031811100324139245\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526733,\n \"\ acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526733\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.45321100917431195,\n \"acc_stderr\": 0.021343255165546037,\n \"\ acc_norm\": 0.45321100917431195,\n \"acc_norm_stderr\": 0.021343255165546037\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.24074074074074073,\n \"acc_stderr\": 0.029157522184605596,\n \"\ acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.029157522184605596\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.28431372549019607,\n \"acc_stderr\": 0.03166009679399812,\n \"\ acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.03166009679399812\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.4008438818565401,\n \"acc_stderr\": 0.03190080389473236,\n \ \ \"acc_norm\": 0.4008438818565401,\n \"acc_norm_stderr\": 0.03190080389473236\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4260089686098655,\n\ \ \"acc_stderr\": 0.0331883328621728,\n \"acc_norm\": 0.4260089686098655,\n\ \ \"acc_norm_stderr\": 0.0331883328621728\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.4351145038167939,\n \"acc_stderr\": 0.04348208051644858,\n\ \ \"acc_norm\": 0.4351145038167939,\n \"acc_norm_stderr\": 0.04348208051644858\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5702479338842975,\n \"acc_stderr\": 0.04519082021319773,\n \"\ acc_norm\": 0.5702479338842975,\n \"acc_norm_stderr\": 0.04519082021319773\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5370370370370371,\n\ \ \"acc_stderr\": 0.04820403072760628,\n \"acc_norm\": 0.5370370370370371,\n\ \ \"acc_norm_stderr\": 0.04820403072760628\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.3803680981595092,\n \"acc_stderr\": 0.03814269893261837,\n\ \ \"acc_norm\": 0.3803680981595092,\n \"acc_norm_stderr\": 0.03814269893261837\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n\ \ \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n\ \ \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.5436893203883495,\n \"acc_stderr\": 0.049318019942204146,\n\ \ \"acc_norm\": 0.5436893203883495,\n \"acc_norm_stderr\": 0.049318019942204146\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6196581196581197,\n\ \ 
\"acc_stderr\": 0.03180425204384099,\n \"acc_norm\": 0.6196581196581197,\n\ \ \"acc_norm_stderr\": 0.03180425204384099\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\"\ : 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-miscellaneous|5\"\ : {\n \"acc\": 0.565772669220945,\n \"acc_stderr\": 0.017724589389677785,\n\ \ \"acc_norm\": 0.565772669220945,\n \"acc_norm_stderr\": 0.017724589389677785\n\ \ },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.41040462427745666,\n\ \ \"acc_stderr\": 0.02648339204209818,\n \"acc_norm\": 0.41040462427745666,\n\ \ \"acc_norm_stderr\": 0.02648339204209818\n },\n \"harness|hendrycksTest-moral_scenarios|5\"\ : {\n \"acc\": 0.20446927374301677,\n \"acc_stderr\": 0.013488813404711917,\n\ \ \"acc_norm\": 0.20446927374301677,\n \"acc_norm_stderr\": 0.013488813404711917\n\ \ },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4117647058823529,\n\ \ \"acc_stderr\": 0.02818059632825929,\n \"acc_norm\": 0.4117647058823529,\n\ \ \"acc_norm_stderr\": 0.02818059632825929\n },\n \"harness|hendrycksTest-philosophy|5\"\ : {\n \"acc\": 0.5048231511254019,\n \"acc_stderr\": 0.028396770444111298,\n\ \ \"acc_norm\": 0.5048231511254019,\n \"acc_norm_stderr\": 0.028396770444111298\n\ \ },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4567901234567901,\n\ \ \"acc_stderr\": 0.02771666165019404,\n \"acc_norm\": 0.4567901234567901,\n\ \ \"acc_norm_stderr\": 0.02771666165019404\n },\n \"harness|hendrycksTest-professional_accounting|5\"\ : {\n \"acc\": 0.31560283687943264,\n \"acc_stderr\": 0.027724989449509314,\n\ \ \"acc_norm\": 0.31560283687943264,\n \"acc_norm_stderr\": 0.027724989449509314\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.27249022164276404,\n\ \ \"acc_stderr\": 0.011371658294311514,\n \"acc_norm\": 0.27249022164276404,\n\ \ \"acc_norm_stderr\": 0.011371658294311514\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.33088235294117646,\n \"acc_stderr\": 0.02858270975389844,\n\ \ \"acc_norm\": 0.33088235294117646,\n \"acc_norm_stderr\": 0.02858270975389844\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.3431372549019608,\n \"acc_stderr\": 0.019206606848825365,\n \ \ \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.019206606848825365\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5272727272727272,\n\ \ \"acc_stderr\": 0.04782001791380061,\n \"acc_norm\": 0.5272727272727272,\n\ \ \"acc_norm_stderr\": 0.04782001791380061\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.2897959183673469,\n \"acc_stderr\": 0.02904308868330432,\n\ \ \"acc_norm\": 0.2897959183673469,\n \"acc_norm_stderr\": 0.02904308868330432\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.48258706467661694,\n\ \ \"acc_stderr\": 0.03533389234739244,\n \"acc_norm\": 0.48258706467661694,\n\ \ \"acc_norm_stderr\": 0.03533389234739244\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \ \ \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4036144578313253,\n\ \ \"acc_stderr\": 0.03819486140758398,\n \"acc_norm\": 0.4036144578313253,\n\ \ \"acc_norm_stderr\": 0.03819486140758398\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.6432748538011696,\n \"acc_stderr\": 0.03674013002860954,\n\ \ 
\"acc_norm\": 0.6432748538011696,\n \"acc_norm_stderr\": 0.03674013002860954\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.29008567931456547,\n\ \ \"mc1_stderr\": 0.01588623687420952,\n \"mc2\": 0.4428923144531004,\n\ \ \"mc2_stderr\": 0.014810370517699043\n }\n}\n```" repo_url: https://huggingface.co/ehartford/CodeLlama-34b-Instruct-hf leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|arc:challenge|25_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hellaswag|10_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:11:17.332215.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:11:17.332215.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:11:17.332215.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:11:17.332215.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:11:17.332215.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:11:17.332215.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_26T00_11_17.332215 path: - '**/details_harness|truthfulqa:mc|0_2023-08-26T00:11:17.332215.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-26T00:11:17.332215.parquet' - config_name: results data_files: - split: 2023_08_26T00_11_17.332215 path: - results_2023-08-26T00:11:17.332215.parquet - split: latest path: - results_2023-08-26T00:11:17.332215.parquet --- # Dataset Card for Evaluation run of ehartford/CodeLlama-34b-Instruct-hf ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/CodeLlama-34b-Instruct-hf - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/CodeLlama-34b-Instruct-hf](https://huggingface.co/ehartford/CodeLlama-34b-Instruct-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
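As a quick way to see these configurations without reading the YAML header above, they can also be enumerated programmatically. This is only a minimal sketch; it assumes a recent `datasets` release that exposes `get_dataset_config_names`:

```python
from datasets import get_dataset_config_names

# Enumerate the configurations of this details dataset: one per evaluated task,
# plus the aggregated "results" configuration listed in the YAML header above.
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_ehartford__CodeLlama-34b-Instruct-hf"
)
print(len(configs))         # should match the 61 configurations mentioned above
print(sorted(configs)[:5])  # a few of the harness_* configuration names
```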
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__CodeLlama-34b-Instruct-hf", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-26T00:11:17.332215](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__CodeLlama-34b-Instruct-hf/blob/main/results_2023-08-26T00%3A11%3A17.332215.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.3954825543560614, "acc_stderr": 0.034996131407759465, "acc_norm": 0.39693969001192136, "acc_norm_stderr": 0.03500279971831286, "mc1": 0.29008567931456547, "mc1_stderr": 0.01588623687420952, "mc2": 0.4428923144531004, "mc2_stderr": 0.014810370517699043 }, "harness|arc:challenge|25": { "acc": 0.378839590443686, "acc_stderr": 0.01417591549000032, "acc_norm": 0.40784982935153585, "acc_norm_stderr": 0.014361097288449708 }, "harness|hellaswag|10": { "acc": 0.2998406691894045, "acc_stderr": 0.004572515919210699, "acc_norm": 0.35680143397729536, "acc_norm_stderr": 0.004780764443411313 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.35555555555555557, "acc_stderr": 0.04135176749720386, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.04135176749720386 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3684210526315789, "acc_stderr": 0.03925523381052932, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.03925523381052932 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4037735849056604, "acc_stderr": 0.030197611600197953, "acc_norm": 0.4037735849056604, "acc_norm_stderr": 0.030197611600197953 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3680555555555556, "acc_stderr": 0.04032999053960718, "acc_norm": 0.3680555555555556, "acc_norm_stderr": 0.04032999053960718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3583815028901734, "acc_stderr": 0.036563436533531585, "acc_norm": 0.3583815028901734, "acc_norm_stderr": 0.036563436533531585 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3872340425531915, "acc_stderr": 0.03184389265339525, "acc_norm": 0.3872340425531915, "acc_norm_stderr": 0.03184389265339525 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.2543859649122807, "acc_stderr": 0.04096985139843672, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.04096985139843672 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.3448275862068966, "acc_stderr": 0.03960933549451208, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03960933549451208 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30423280423280424, "acc_stderr": 0.02369541500946309, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.02369541500946309 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.44193548387096776, "acc_stderr": 0.02825155790684974, "acc_norm": 0.44193548387096776, "acc_norm_stderr": 0.02825155790684974 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.033864057460620905, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.033864057460620905 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.28484848484848485, "acc_stderr": 0.035243908445117836, "acc_norm": 0.28484848484848485, "acc_norm_stderr": 0.035243908445117836 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5202020202020202, "acc_stderr": 0.03559443565563918, "acc_norm": 0.5202020202020202, "acc_norm_stderr": 0.03559443565563918 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.533678756476684, "acc_stderr": 0.036002440698671784, "acc_norm": 0.533678756476684, "acc_norm_stderr": 0.036002440698671784 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3564102564102564, "acc_stderr": 0.024283140529467295, "acc_norm": 0.3564102564102564, "acc_norm_stderr": 0.024283140529467295 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683515, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683515 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.39915966386554624, "acc_stderr": 0.031811100324139245, "acc_norm": 0.39915966386554624, "acc_norm_stderr": 0.031811100324139245 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.45321100917431195, "acc_stderr": 0.021343255165546037, "acc_norm": 0.45321100917431195, "acc_norm_stderr": 0.021343255165546037 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.029157522184605596, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.029157522184605596 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.28431372549019607, "acc_stderr": 0.03166009679399812, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.03166009679399812 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.4008438818565401, "acc_stderr": 0.03190080389473236, "acc_norm": 0.4008438818565401, "acc_norm_stderr": 0.03190080389473236 }, "harness|hendrycksTest-human_aging|5": { "acc": 
0.4260089686098655, "acc_stderr": 0.0331883328621728, "acc_norm": 0.4260089686098655, "acc_norm_stderr": 0.0331883328621728 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.4351145038167939, "acc_stderr": 0.04348208051644858, "acc_norm": 0.4351145038167939, "acc_norm_stderr": 0.04348208051644858 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5702479338842975, "acc_stderr": 0.04519082021319773, "acc_norm": 0.5702479338842975, "acc_norm_stderr": 0.04519082021319773 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5370370370370371, "acc_stderr": 0.04820403072760628, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.04820403072760628 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3803680981595092, "acc_stderr": 0.03814269893261837, "acc_norm": 0.3803680981595092, "acc_norm_stderr": 0.03814269893261837 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.5436893203883495, "acc_stderr": 0.049318019942204146, "acc_norm": 0.5436893203883495, "acc_norm_stderr": 0.049318019942204146 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6196581196581197, "acc_stderr": 0.03180425204384099, "acc_norm": 0.6196581196581197, "acc_norm_stderr": 0.03180425204384099 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.565772669220945, "acc_stderr": 0.017724589389677785, "acc_norm": 0.565772669220945, "acc_norm_stderr": 0.017724589389677785 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.41040462427745666, "acc_stderr": 0.02648339204209818, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.02648339204209818 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.20446927374301677, "acc_stderr": 0.013488813404711917, "acc_norm": 0.20446927374301677, "acc_norm_stderr": 0.013488813404711917 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4117647058823529, "acc_stderr": 0.02818059632825929, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.02818059632825929 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5048231511254019, "acc_stderr": 0.028396770444111298, "acc_norm": 0.5048231511254019, "acc_norm_stderr": 0.028396770444111298 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4567901234567901, "acc_stderr": 0.02771666165019404, "acc_norm": 0.4567901234567901, "acc_norm_stderr": 0.02771666165019404 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.31560283687943264, "acc_stderr": 0.027724989449509314, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509314 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.27249022164276404, "acc_stderr": 0.011371658294311514, "acc_norm": 0.27249022164276404, "acc_norm_stderr": 0.011371658294311514 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.33088235294117646, "acc_stderr": 0.02858270975389844, "acc_norm": 0.33088235294117646, "acc_norm_stderr": 0.02858270975389844 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.3431372549019608, "acc_stderr": 0.019206606848825365, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.019206606848825365 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.2897959183673469, "acc_stderr": 0.02904308868330432, "acc_norm": 0.2897959183673469, "acc_norm_stderr": 0.02904308868330432 }, "harness|hendrycksTest-sociology|5": { "acc": 0.48258706467661694, "acc_stderr": 0.03533389234739244, "acc_norm": 0.48258706467661694, "acc_norm_stderr": 0.03533389234739244 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-virology|5": { "acc": 0.4036144578313253, "acc_stderr": 0.03819486140758398, "acc_norm": 0.4036144578313253, "acc_norm_stderr": 0.03819486140758398 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6432748538011696, "acc_stderr": 0.03674013002860954, "acc_norm": 0.6432748538011696, "acc_norm_stderr": 0.03674013002860954 }, "harness|truthfulqa:mc|0": { "mc1": 0.29008567931456547, "mc1_stderr": 0.01588623687420952, "mc2": 0.4428923144531004, "mc2_stderr": 0.014810370517699043 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.6974145770072937, -0.8354204297065735, 0.31322526931762695, 0.178365558385849, -0.10930223017930984, -0.059435371309518814, 0.0049810269847512245, -0.21916861832141876, 0.5749392509460449, -0.05171225219964981, -0.47438275814056396, -0.7115140557289124, -0.4043838083744049, 0.23186355829238892, -0.01848231628537178, 0.8015747666358948, -0.20688940584659576, -0.1340552568435669, 0.06105254963040352, -0.027935566380620003, -0.21694594621658325, -0.36420711874961853, -0.4799390435218811, -0.359978049993515, 0.1520637720823288, 0.426011860370636, 0.43833163380622864, 0.8540390133857727, 0.701448917388916, 0.2915721833705902, -0.2983180284500122, -0.03947179764509201, -0.17763929069042206, -0.30228570103645325, 0.40673771500587463, -0.3611994683742523, -0.8575595021247864, 0.306863397359848, 0.7506565451622009, 0.6362485289573669, -0.12380307912826538, 0.28072547912597656, 0.04212094843387604, 0.5974147319793701, -0.3683810234069824, 0.05021261051297188, -0.2562216818332672, 0.24758656322956085, -0.17527198791503906, -0.2788405418395996, -0.30664849281311035, -0.25754314661026, -0.12368795275688171, -0.887872040271759, 0.2680291533470154, 0.335587739944458, 1.571958303451538, -0.14128421247005463, -0.20593830943107605, 0.0647256150841713, -0.07495998591184616, 1.0369901657104492, -0.8799488544464111, 0.3466956615447998, 0.8185805678367615, 0.09617679566144943, -0.15053938329219818, -0.5813848972320557, -0.5709908604621887, 0.06460822373628616, -0.3830663859844208, 0.35332173109054565, -0.05040193349123001, -0.14329539239406586, 0.3712402880191803, 0.6733340620994568, -0.6926211714744568, 0.16991490125656128, -0.6524299383163452, -0.19523167610168457, 1.0627423524856567, 0.3712599277496338, 0.0503719225525856, -0.397831529378891, -0.6846440434455872, -0.6455827355384827, -0.41955724358558655, 0.2581738233566284, 0.4296095371246338, 0.3175146281719208, -0.44016528129577637, 0.662125289440155, -0.37936094403266907, 0.5858392715454102, 0.40951240062713623, 0.010033969767391682, 0.8890241384506226, -0.652205228805542, -0.5359824895858765, -0.057323481887578964, 1.093126893043518, 0.5489863157272339, 0.05698688328266144, 0.23847819864749908, 0.01966453343629837, -0.08860050141811371, 0.0258603785187006, -0.8612585067749023, -0.3499167859554291, 0.18927010893821716, -0.38926932215690613, -0.5012698173522949, 0.37903326749801636, -0.9040852189064026, 0.1763765960931778, -0.03267858549952507, 0.3997708857059479, -0.4846293032169342, -0.09795422852039337, 0.23985524475574493, -0.4068949520587921, 0.8066771626472473, -0.18894065916538239, -0.7590968608856201, 0.3694566786289215, 0.5159658789634705, 0.7599875330924988, -0.11270289123058319, -0.44923827052116394, -0.08758559823036194, -0.11324053257703781, -0.2814639210700989, 0.5250065922737122, -0.2844616174697876, -0.39466172456741333, -0.2636287212371826, 0.28508460521698, -0.26282379031181335, -0.36661311984062195, 0.7490331530570984, -0.22220642864704132, 0.21503330767154694, -0.40876665711402893, -0.6602636575698853, 0.14896449446678162, 0.38602155447006226, -0.41845279932022095, 1.3020325899124146, 0.26156556606292725, -0.7830455303192139, 0.38234952092170715, -0.613052487373352, -0.1772185117006302, -0.03428514301776886, -0.06617999821901321, -0.792799174785614, -0.29978230595588684, 0.1822001338005066, 0.39937493205070496, -0.1228947788476944, -0.12101206183433533, -0.3909126818180084, -0.3286845088005066, 0.3553251624107361, -0.15300112962722778, 1.2011101245880127, -0.0074761840514838696, -0.7524283528327942, -0.14197108149528503, 
-1.2402153015136719, 0.35732024908065796, 0.19156381487846375, -0.4189901649951935, -0.17731493711471558, -0.49854210019111633, -0.004236928187310696, 0.17675532400608063, 0.28222090005874634, -0.8096185326576233, 0.24446766078472137, -0.3666042983531952, 0.2042933851480484, 1.2512211799621582, 0.02794675901532173, 0.17959360778331757, -0.5855802297592163, 0.5378686785697937, 0.18872907757759094, 0.1905526965856552, 0.34016871452331543, -0.5990737080574036, -0.8043017387390137, -0.515255331993103, -0.04888910427689552, 0.5749157071113586, -0.18344710767269135, 1.1337218284606934, 0.09523336589336395, -0.8651503920555115, -0.48098158836364746, -0.1383497267961502, 0.5275506973266602, 0.7556934356689453, 0.5733345746994019, -0.03908331319689751, -0.6325365900993347, -1.0892822742462158, -0.30732935667037964, -0.19145026803016663, 0.14361700415611267, 0.24640212953090668, 1.0355535745620728, -0.2508806586265564, 0.6175359487533569, -1.0783812999725342, -0.20596258342266083, 0.21264256536960602, -0.03134112432599068, 0.7925135493278503, 0.7383843064308167, 0.5518088340759277, -0.6681260466575623, -0.5405102968215942, 0.14464673399925232, -0.9049725532531738, -0.11353538930416107, 0.11781977117061615, -0.3295411169528961, 0.14218053221702576, 0.14085663855075836, -0.6573479771614075, 0.5314609408378601, 0.2294730544090271, -1.0682992935180664, 1.0379905700683594, -0.3515686094760895, 0.5995867252349854, -0.9914682507514954, 0.1635971963405609, -0.04842756316065788, 0.0050885118544101715, -0.48747915029525757, 0.06394954025745392, 0.11022868007421494, 0.42800888419151306, -0.4835238456726074, 0.7432570457458496, -0.7080358266830444, -0.05926712229847908, 0.4472093880176544, 0.1303088665008545, -0.08635495603084564, 0.3772786259651184, -0.237125962972641, 0.7728115320205688, 0.7747836112976074, -0.46866080164909363, 0.517306387424469, 0.42639005184173584, -0.20567232370376587, 0.7227729558944702, -0.48576098680496216, -0.2836868464946747, 0.3210335969924927, -0.07687723636627197, -0.8178873062133789, -0.4902140498161316, 0.027906592935323715, -0.6062315106391907, -0.10740145295858383, 0.3709355890750885, -0.2594219744205475, -0.8149153590202332, -0.941335141658783, 0.3202840983867645, 0.7542116045951843, -0.45406585931777954, -0.17588478326797485, 0.09147938340902328, 0.1177477091550827, -0.7943638563156128, -0.8503808379173279, -0.4976932108402252, -0.20550672709941864, -0.726856529712677, 0.35030779242515564, -0.2678278684616089, -0.3009745478630066, -0.07336046546697617, -0.2143590897321701, -0.36179208755493164, -0.01637587696313858, 0.1655505895614624, 0.6666693091392517, -0.4079325497150421, -0.2894332706928253, -0.2732419967651367, -0.15706713497638702, 0.27816951274871826, -0.09903482347726822, 0.3689022958278656, -0.48954296112060547, -0.41781193017959595, -0.4268704354763031, -0.03200070932507515, 0.7184098958969116, -0.06931064277887344, 0.7124611139297485, 0.39168068766593933, -0.31272372603416443, -0.03456219658255577, -0.2799433767795563, -0.3013331890106201, -0.5866190791130066, 0.27773842215538025, -0.4664478003978729, -1.0327646732330322, 0.7741915583610535, 0.5380643606185913, 0.04161137342453003, 1.1399319171905518, 0.5867159962654114, -0.31735458970069885, 0.987963080406189, 0.043333474546670914, 0.3382224440574646, 0.36835622787475586, -0.6854456067085266, 0.09666584432125092, -0.9082147479057312, -0.3050830662250519, -0.588692843914032, -0.4527836740016937, -0.7019532322883606, -0.057287681847810745, 0.2906888425350189, 0.14539329707622528, -0.6881214380264282, 
[embedding: float vector omitted for readability]
open-llm-leaderboard/details_nicholasKluge__Aira-2-124M
open-llm-leaderboard
2023-08-27T12:43:08Z
201
0
[ "region:us" ]
null
2023-08-26T00:59:08Z
--- pretty_name: Evaluation run of nicholasKluge/Aira-2-124M dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [nicholasKluge/Aira-2-124M](https://huggingface.co/nicholasKluge/Aira-2-124M)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nicholasKluge__Aira-2-124M\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-26T00:58:54.483693](https://huggingface.co/datasets/open-llm-leaderboard/details_nicholasKluge__Aira-2-124M/blob/main/results_2023-08-26T00%3A58%3A54.483693.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2563784281118179,\n\ \ \"acc_stderr\": 0.03131922643477471,\n \"acc_norm\": 0.25747333491194596,\n\ \ \"acc_norm_stderr\": 0.03133423457395941,\n \"mc1\": 0.23378212974296206,\n\ \ \"mc1_stderr\": 0.014816195991931583,\n \"mc2\": 0.39825983953563676,\n\ \ \"mc2_stderr\": 0.014916655527587098\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.2030716723549488,\n \"acc_stderr\": 0.011755899303705582,\n\ \ \"acc_norm\": 0.2431740614334471,\n \"acc_norm_stderr\": 0.012536554144587094\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2907787293367855,\n\ \ \"acc_stderr\": 0.004531935391507024,\n \"acc_norm\": 0.3152758414658435,\n\ \ \"acc_norm_stderr\": 0.004636760762522853\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.22962962962962963,\n\ \ \"acc_stderr\": 0.03633384414073461,\n \"acc_norm\": 0.22962962962962963,\n\ \ \"acc_norm_stderr\": 0.03633384414073461\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\ \ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n\ \ \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \ \ \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.2490566037735849,\n \"acc_stderr\": 0.02661648298050171,\n\ \ \"acc_norm\": 0.2490566037735849,\n \"acc_norm_stderr\": 0.02661648298050171\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n\ \ \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \ \ \"acc_norm_stderr\": 0.03621034121889507\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.32,\n\ \ \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \ \ \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24855491329479767,\n\ \ \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.24855491329479767,\n\ \ \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n\ \ \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.2723404255319149,\n \"acc_stderr\": 0.0291012906983867,\n\ \ \"acc_norm\": 0.2723404255319149,\n \"acc_norm_stderr\": 0.0291012906983867\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2807017543859649,\n\ \ \"acc_stderr\": 0.042270544512322004,\n \"acc_norm\": 0.2807017543859649,\n\ \ \"acc_norm_stderr\": 0.042270544512322004\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n\ \ \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.25132275132275134,\n \"acc_stderr\": 0.022340482339643895,\n \"\ acc_norm\": 0.25132275132275134,\n \"acc_norm_stderr\": 0.022340482339643895\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.15873015873015872,\n\ \ \"acc_stderr\": 0.03268454013011743,\n \"acc_norm\": 0.15873015873015872,\n\ \ \"acc_norm_stderr\": 0.03268454013011743\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.14,\n \"acc_stderr\": 0.03487350880197771,\n \ \ \"acc_norm\": 0.14,\n \"acc_norm_stderr\": 0.03487350880197771\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25806451612903225,\n\ \ \"acc_stderr\": 0.024892469172462826,\n \"acc_norm\": 0.25806451612903225,\n\ \ \"acc_norm_stderr\": 0.024892469172462826\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.21674876847290642,\n \"acc_stderr\": 0.028990331252516235,\n\ \ \"acc_norm\": 0.21674876847290642,\n \"acc_norm_stderr\": 0.028990331252516235\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.24848484848484848,\n \"acc_stderr\": 0.033744026441394036,\n\ \ \"acc_norm\": 0.24848484848484848,\n \"acc_norm_stderr\": 0.033744026441394036\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.35353535353535354,\n \"acc_stderr\": 0.03406086723547153,\n \"\ acc_norm\": 0.35353535353535354,\n \"acc_norm_stderr\": 0.03406086723547153\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.03480175668466036,\n\ \ \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.3717948717948718,\n \"acc_stderr\": 0.02450347255711094,\n \ \ \"acc_norm\": 0.3717948717948718,\n \"acc_norm_stderr\": 0.02450347255711094\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.24444444444444444,\n \"acc_stderr\": 0.026202766534652148,\n \ \ \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.026202766534652148\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.3025210084033613,\n \"acc_stderr\": 0.02983796238829193,\n \ \ \"acc_norm\": 0.3025210084033613,\n \"acc_norm_stderr\": 0.02983796238829193\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.23841059602649006,\n \"acc_stderr\": 0.0347918557259966,\n \"\ acc_norm\": 0.23841059602649006,\n \"acc_norm_stderr\": 0.0347918557259966\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.3486238532110092,\n \"acc_stderr\": 0.020431254090714328,\n \"\ acc_norm\": 0.3486238532110092,\n \"acc_norm_stderr\": 0.020431254090714328\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\"\ : 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n\ \ \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n\ \ \"acc_stderr\": 0.030778554678693264,\n \"acc_norm\": 0.25980392156862747,\n\ \ \"acc_norm_stderr\": 0.030778554678693264\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.2616033755274262,\n \"acc_stderr\": 0.028609516716994934,\n\ \ \"acc_norm\": 0.2616033755274262,\n \"acc_norm_stderr\": 0.028609516716994934\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.10762331838565023,\n\ \ \"acc_stderr\": 0.020799400082879997,\n \"acc_norm\": 0.10762331838565023,\n\ \ \"acc_norm_stderr\": 0.020799400082879997\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.21374045801526717,\n \"acc_stderr\": 0.0359546161177469,\n\ \ \"acc_norm\": 0.21374045801526717,\n \"acc_norm_stderr\": 0.0359546161177469\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2231404958677686,\n \"acc_stderr\": 0.03800754475228733,\n \"\ acc_norm\": 0.2231404958677686,\n \"acc_norm_stderr\": 0.03800754475228733\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2037037037037037,\n\ \ \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.2037037037037037,\n\ \ \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.24539877300613497,\n \"acc_stderr\": 0.03380939813943354,\n\ \ \"acc_norm\": 0.24539877300613497,\n \"acc_norm_stderr\": 0.03380939813943354\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.23214285714285715,\n\ \ \"acc_stderr\": 0.040073418097558065,\n \"acc_norm\": 0.23214285714285715,\n\ \ \"acc_norm_stderr\": 0.040073418097558065\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.3106796116504854,\n \"acc_stderr\": 0.0458212416016155,\n\ \ \"acc_norm\": 0.3106796116504854,\n \"acc_norm_stderr\": 0.0458212416016155\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.18803418803418803,\n\ \ 
\"acc_stderr\": 0.02559819368665225,\n \"acc_norm\": 0.18803418803418803,\n\ \ \"acc_norm_stderr\": 0.02559819368665225\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.227330779054917,\n\ \ \"acc_stderr\": 0.014987270640946015,\n \"acc_norm\": 0.227330779054917,\n\ \ \"acc_norm_stderr\": 0.014987270640946015\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.23410404624277456,\n \"acc_stderr\": 0.022797110278071138,\n\ \ \"acc_norm\": 0.23410404624277456,\n \"acc_norm_stderr\": 0.022797110278071138\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.2679738562091503,\n \"acc_stderr\": 0.025360603796242553,\n\ \ \"acc_norm\": 0.2679738562091503,\n \"acc_norm_stderr\": 0.025360603796242553\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.19292604501607716,\n\ \ \"acc_stderr\": 0.022411516780911366,\n \"acc_norm\": 0.19292604501607716,\n\ \ \"acc_norm_stderr\": 0.022411516780911366\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.023132376234543343,\n\ \ \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.023132376234543343\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2730496453900709,\n \"acc_stderr\": 0.026577860943307857,\n \ \ \"acc_norm\": 0.2730496453900709,\n \"acc_norm_stderr\": 0.026577860943307857\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2470664928292047,\n\ \ \"acc_stderr\": 0.011015752255279338,\n \"acc_norm\": 0.2470664928292047,\n\ \ \"acc_norm_stderr\": 0.011015752255279338\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n\ \ \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.2549019607843137,\n \"acc_stderr\": 0.017630827375148383,\n \ \ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.017630827375148383\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n\ \ \"acc_stderr\": 0.040693063197213754,\n \"acc_norm\": 0.23636363636363636,\n\ \ \"acc_norm_stderr\": 0.040693063197213754\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.031362502409358936,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.031362502409358936\n \ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n\ \ \"acc_stderr\": 0.030360490154014638,\n \"acc_norm\": 0.24378109452736318,\n\ \ \"acc_norm_stderr\": 0.030360490154014638\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.19879518072289157,\n\ \ \"acc_stderr\": 0.031069390260789437,\n \"acc_norm\": 0.19879518072289157,\n\ \ \"acc_norm_stderr\": 0.031069390260789437\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 
0.03615507630310935,\n\ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.03615507630310935\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23378212974296206,\n\ \ \"mc1_stderr\": 0.014816195991931583,\n \"mc2\": 0.39825983953563676,\n\ \ \"mc2_stderr\": 0.014916655527587098\n }\n}\n```" repo_url: https://huggingface.co/nicholasKluge/Aira-2-124M leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|arc:challenge|25_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hellaswag|10_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:58:54.483693.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:58:54.483693.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:58:54.483693.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:58:54.483693.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-26T00:58:54.483693.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-26T00:58:54.483693.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_26T00_58_54.483693 path: - '**/details_harness|truthfulqa:mc|0_2023-08-26T00:58:54.483693.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-26T00:58:54.483693.parquet' - config_name: results data_files: - split: 2023_08_26T00_58_54.483693 path: - results_2023-08-26T00:58:54.483693.parquet - split: latest path: - results_2023-08-26T00:58:54.483693.parquet --- # Dataset Card for Evaluation run of nicholasKluge/Aira-2-124M ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/nicholasKluge/Aira-2-124M - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [nicholasKluge/Aira-2-124M](https://huggingface.co/nicholasKluge/Aira-2-124M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
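A minimal usage sketch for that aggregated configuration (assuming the `datasets` library is installed and the Hub is reachable; the config name `results` and the `latest` split are the ones declared in the configuration list above):

```python
from datasets import load_dataset

# Aggregated metrics of the run; the "latest" split always points at the
# most recent evaluation results for this model.
results = load_dataset(
    "open-llm-leaderboard/details_nicholasKluge__Aira-2-124M",
    "results",
    split="latest",
)

print(results[0])  # first row of the aggregated results table
```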
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nicholasKluge__Aira-2-124M", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-26T00:58:54.483693](https://huggingface.co/datasets/open-llm-leaderboard/details_nicholasKluge__Aira-2-124M/blob/main/results_2023-08-26T00%3A58%3A54.483693.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2563784281118179, "acc_stderr": 0.03131922643477471, "acc_norm": 0.25747333491194596, "acc_norm_stderr": 0.03133423457395941, "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931583, "mc2": 0.39825983953563676, "mc2_stderr": 0.014916655527587098 }, "harness|arc:challenge|25": { "acc": 0.2030716723549488, "acc_stderr": 0.011755899303705582, "acc_norm": 0.2431740614334471, "acc_norm_stderr": 0.012536554144587094 }, "harness|hellaswag|10": { "acc": 0.2907787293367855, "acc_stderr": 0.004531935391507024, "acc_norm": 0.3152758414658435, "acc_norm_stderr": 0.004636760762522853 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.03633384414073461, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073461 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2490566037735849, "acc_stderr": 0.02661648298050171, "acc_norm": 0.2490566037735849, "acc_norm_stderr": 0.02661648298050171 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24855491329479767, "acc_stderr": 0.03295304696818318, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818318 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2723404255319149, "acc_stderr": 0.0291012906983867, "acc_norm": 0.2723404255319149, "acc_norm_stderr": 0.0291012906983867 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.042270544512322004, 
"acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512322004 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25132275132275134, "acc_stderr": 0.022340482339643895, "acc_norm": 0.25132275132275134, "acc_norm_stderr": 0.022340482339643895 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.03268454013011743, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.03268454013011743 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.14, "acc_stderr": 0.03487350880197771, "acc_norm": 0.14, "acc_norm_stderr": 0.03487350880197771 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25806451612903225, "acc_stderr": 0.024892469172462826, "acc_norm": 0.25806451612903225, "acc_norm_stderr": 0.024892469172462826 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.21674876847290642, "acc_stderr": 0.028990331252516235, "acc_norm": 0.21674876847290642, "acc_norm_stderr": 0.028990331252516235 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24848484848484848, "acc_stderr": 0.033744026441394036, "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.033744026441394036 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.35353535353535354, "acc_stderr": 0.03406086723547153, "acc_norm": 0.35353535353535354, "acc_norm_stderr": 0.03406086723547153 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3717948717948718, "acc_stderr": 0.02450347255711094, "acc_norm": 0.3717948717948718, "acc_norm_stderr": 0.02450347255711094 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.026202766534652148, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.026202766534652148 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3025210084033613, "acc_stderr": 0.02983796238829193, "acc_norm": 0.3025210084033613, "acc_norm_stderr": 0.02983796238829193 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 0.0347918557259966, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.0347918557259966 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3486238532110092, "acc_stderr": 0.020431254090714328, "acc_norm": 0.3486238532110092, "acc_norm_stderr": 0.020431254090714328 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693264, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693264 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2616033755274262, "acc_stderr": 0.028609516716994934, "acc_norm": 0.2616033755274262, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.10762331838565023, "acc_stderr": 0.020799400082879997, 
"acc_norm": 0.10762331838565023, "acc_norm_stderr": 0.020799400082879997 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.21374045801526717, "acc_stderr": 0.0359546161177469, "acc_norm": 0.21374045801526717, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2231404958677686, "acc_stderr": 0.03800754475228733, "acc_norm": 0.2231404958677686, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2037037037037037, "acc_stderr": 0.03893542518824847, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.24539877300613497, "acc_stderr": 0.03380939813943354, "acc_norm": 0.24539877300613497, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.040073418097558065, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.040073418097558065 }, "harness|hendrycksTest-management|5": { "acc": 0.3106796116504854, "acc_stderr": 0.0458212416016155, "acc_norm": 0.3106796116504854, "acc_norm_stderr": 0.0458212416016155 }, "harness|hendrycksTest-marketing|5": { "acc": 0.18803418803418803, "acc_stderr": 0.02559819368665225, "acc_norm": 0.18803418803418803, "acc_norm_stderr": 0.02559819368665225 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.227330779054917, "acc_stderr": 0.014987270640946015, "acc_norm": 0.227330779054917, "acc_norm_stderr": 0.014987270640946015 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23410404624277456, "acc_stderr": 0.022797110278071138, "acc_norm": 0.23410404624277456, "acc_norm_stderr": 0.022797110278071138 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2679738562091503, "acc_stderr": 0.025360603796242553, "acc_norm": 0.2679738562091503, "acc_norm_stderr": 0.025360603796242553 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.19292604501607716, "acc_stderr": 0.022411516780911366, "acc_norm": 0.19292604501607716, "acc_norm_stderr": 0.022411516780911366 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2222222222222222, "acc_stderr": 0.023132376234543343, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.023132376234543343 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2730496453900709, "acc_stderr": 0.026577860943307857, "acc_norm": 0.2730496453900709, "acc_norm_stderr": 0.026577860943307857 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2470664928292047, "acc_stderr": 0.011015752255279338, "acc_norm": 0.2470664928292047, "acc_norm_stderr": 0.011015752255279338 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2549019607843137, "acc_stderr": 0.017630827375148383, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.017630827375148383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.040693063197213754, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.040693063197213754 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.4, "acc_stderr": 0.031362502409358936, "acc_norm": 0.4, "acc_norm_stderr": 0.031362502409358936 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014638, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014638 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-virology|5": { "acc": 0.19879518072289157, "acc_stderr": 0.031069390260789437, "acc_norm": 0.19879518072289157, "acc_norm_stderr": 0.031069390260789437 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03615507630310935, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03615507630310935 }, "harness|truthfulqa:mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931583, "mc2": 0.39825983953563676, "mc2_stderr": 0.014916655527587098 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7082951664924622, -0.8354209661483765, 0.26904943585395813, 0.19504772126674652, -0.14622755348682404, -0.07947152107954025, 0.02828379161655903, -0.20818771421909332, 0.6042319536209106, -0.09094438701868057, -0.5062285661697388, -0.6485127210617065, -0.4378946125507355, 0.19561010599136353, -0.01648237369954586, 0.7832376956939697, -0.21446911990642548, -0.12358201295137405, 0.08544023334980011, -0.021993916481733322, -0.22715438902378082, -0.3729166090488434, -0.464320570230484, -0.3601117730140686, 0.16996435821056366, 0.4449547231197357, 0.45238828659057617, 0.8296343684196472, 0.667163610458374, 0.28529155254364014, -0.31956690549850464, -0.028184594586491585, -0.1683141589164734, -0.3255343735218048, 0.40902209281921387, -0.3321159780025482, -0.8341498970985413, 0.33230122923851013, 0.7615128755569458, 0.6408580541610718, -0.10926438122987747, 0.2787802517414093, 0.011786149814724922, 0.6040648221969604, -0.3715284466743469, 0.028530621901154518, -0.28830456733703613, 0.2163449227809906, -0.1678389012813568, -0.25778916478157043, -0.27571409940719604, -0.18053844571113586, -0.1474052220582962, -0.9238518476486206, 0.2324553281068802, 0.29760485887527466, 1.6245378255844116, -0.14497599005699158, -0.252992182970047, 0.10439161211252213, -0.1435897797346115, 1.0006417036056519, -0.8701381683349609, 0.3701656758785248, 0.785879373550415, 0.1522514820098877, -0.1771557331085205, -0.585824728012085, -0.63254714012146, 0.11766892671585083, -0.36562350392341614, 0.35405412316322327, -0.08493813872337341, -0.202632874250412, 0.35909098386764526, 0.6369888186454773, -0.6691393852233887, 0.14703166484832764, -0.6450705528259277, -0.15843963623046875, 1.0505064725875854, 0.3231132924556732, 0.08462151885032654, -0.3518291413784027, -0.6956276893615723, -0.649074375629425, -0.42301055788993835, 0.26090726256370544, 0.4471043348312378, 0.34434008598327637, -0.41554704308509827, 0.6931900978088379, -0.4375537633895874, 0.5426180362701416, 0.39699047803878784, 0.04266083240509033, 0.8978750705718994, -0.6623576879501343, -0.5457608699798584, -0.09194563329219818, 1.081011176109314, 0.5557973384857178, 0.048053301870822906, 0.21721534430980682, 0.02699957974255085, -0.0935015007853508, 0.04630244895815849, -0.8801450729370117, -0.2979566752910614, 0.1536862850189209, -0.4074043929576874, -0.5173583030700684, 0.3495069444179535, -0.8956270217895508, 0.172568678855896, -0.05811963602900505, 0.4389050304889679, -0.45329248905181885, -0.14016127586364746, 0.25269073247909546, -0.39996251463890076, 0.8367780447006226, -0.18748565018177032, -0.7934904098510742, 0.4158327281475067, 0.5186680555343628, 0.7451795339584351, -0.06692022830247879, -0.42914530634880066, -0.10399021208286285, -0.11610431969165802, -0.29115161299705505, 0.5520280599594116, -0.2861058712005615, -0.3882388770580292, -0.29917776584625244, 0.2908564805984497, -0.2836841344833374, -0.33781692385673523, 0.7181076407432556, -0.2171696573495865, 0.21022474765777588, -0.42101073265075684, -0.647322952747345, 0.1242276281118393, 0.38381263613700867, -0.4070143699645996, 1.2951278686523438, 0.27672523260116577, -0.7979229092597961, 0.41661620140075684, -0.5714072585105896, -0.11590897291898727, -0.04815565422177315, -0.0602603480219841, -0.7919981479644775, -0.28647395968437195, 0.18977272510528564, 0.391726016998291, -0.1255415678024292, -0.12695251405239105, -0.3921964466571808, -0.3394555151462555, 0.3342907428741455, -0.15389056503772736, 1.2028168439865112, -0.0011019763769581914, -0.7584338188171387, -0.12963633239269257, 
-1.218727707862854, 0.3069673180580139, 0.2102101892232895, -0.376603364944458, -0.1719023883342743, -0.4671597182750702, 0.004159227013587952, 0.22166715562343597, 0.2817092835903168, -0.7945192456245422, 0.2832659184932709, -0.37088873982429504, 0.14963509142398834, 1.2517653703689575, 0.03136347606778145, 0.14836110174655914, -0.5637081861495972, 0.544916570186615, 0.2093992531299591, 0.18256066739559174, 0.3894326090812683, -0.6087764501571655, -0.7960662841796875, -0.46496233344078064, -0.06214870885014534, 0.607927680015564, -0.20435872673988342, 1.144181728363037, 0.0746384784579277, -0.9096632599830627, -0.4188068211078644, -0.12285137921571732, 0.4958445727825165, 0.7690328359603882, 0.5914657115936279, -0.011554350145161152, -0.6278512477874756, -1.0819966793060303, -0.3147621154785156, -0.2020939588546753, 0.16744336485862732, 0.24830657243728638, 1.0285416841506958, -0.2660280764102936, 0.5538953542709351, -1.056616187095642, -0.20073145627975464, 0.19136294722557068, -0.06841923296451569, 0.7636186480522156, 0.7618292570114136, 0.6081822514533997, -0.6555622220039368, -0.5174281001091003, 0.1791667491197586, -0.8907619714736938, -0.09569600969552994, 0.11317223310470581, -0.34888964891433716, 0.13356170058250427, 0.15604929625988007, -0.7071282863616943, 0.5613499879837036, 0.2409067004919052, -1.1401090621948242, 1.037087321281433, -0.3178701400756836, 0.5672162771224976, -1.0437850952148438, 0.2128176987171173, -0.05476878210902214, 0.0645732507109642, -0.524895191192627, 0.06338008493185043, 0.07032875716686249, 0.4222060441970825, -0.5122151374816895, 0.8173545598983765, -0.673004686832428, -0.06772836297750473, 0.4289170503616333, 0.11365985125303268, -0.11106972396373749, 0.38030147552490234, -0.219743549823761, 0.7843766212463379, 0.7777665853500366, -0.45639896392822266, 0.4991137981414795, 0.4364280700683594, -0.19568510353565216, 0.6973099708557129, -0.47361892461776733, -0.29084646701812744, 0.3121562600135803, -0.04347427189350128, -0.8399658799171448, -0.4881201684474945, 0.03688078001141548, -0.6429756879806519, -0.12505453824996948, 0.36831411719322205, -0.2770739793777466, -0.7845154404640198, -0.924251914024353, 0.3470448851585388, 0.7116597890853882, -0.4452296495437622, -0.14857394993305206, 0.07628770917654037, 0.10164128243923187, -0.8225137591362, -0.8215295076370239, -0.4897260069847107, -0.213912233710289, -0.7190529704093933, 0.31072816252708435, -0.26556405425071716, -0.29861557483673096, -0.061045657843351364, -0.24953921139240265, -0.35723063349723816, 0.03541496768593788, 0.14656202495098114, 0.6683541536331177, -0.40087151527404785, -0.3062209188938141, -0.23298843204975128, -0.1818198412656784, 0.2460588961839676, -0.12316162139177322, 0.36683061718940735, -0.47438833117485046, -0.37882664799690247, -0.4392656087875366, -0.04352854564785957, 0.7213702201843262, -0.04600323364138603, 0.7460436820983887, 0.42746689915657043, -0.3541659116744995, -0.007079788949340582, -0.2723012864589691, -0.29924577474594116, -0.5801860690116882, 0.23543331027030945, -0.49982449412345886, -1.0251502990722656, 0.7997258305549622, 0.5231461524963379, 0.043875519186258316, 1.170323133468628, 0.575439453125, -0.29149529337882996, 1.023180603981018, 0.011480102315545082, 0.3197174668312073, 0.36831480264663696, -0.7300286293029785, 0.09590788185596466, -0.930896520614624, -0.3406490385532379, -0.613515317440033, -0.5022372007369995, -0.6836191415786743, -0.08529076725244522, 0.27139395475387573, 0.20041300356388092, -0.6617071628570557, 0.589600682258606, 
-0.8216811418533325, 0.5833641290664673, 0.5384342074394226, 0.23402391374111176, 0.13255998492240906, -0.14205124974250793, -0.41025176644325256, -0.11739079654216766, -0.4702138602733612, -0.2417474091053009, 1.2268925905227661, 0.278168648481369, 0.723014771938324, 0.09341337531805038, 0.8664379119873047, 0.05519230663776398, -0.1064877063035965, -0.5851166248321533, 0.6274685263633728, 0.10568017512559891, -0.8154836893081665, -0.40725284814834595, -0.5141047835350037, -1.1149039268493652, 0.4028787314891815, -0.13085754215717316, -0.8282563090324402, 0.11795665323734283, 0.019074827432632446, -0.17551396787166595, 0.5122330188751221, -0.5337702631950378, 0.8428882360458374, -0.09618907421827316, -0.43993163108825684, 0.07623115181922913, -0.8277761340141296, 0.43664979934692383, 0.20986466109752655, 0.26219552755355835, 0.028192387893795967, 0.2313617616891861, 1.2154901027679443, -0.8372671604156494, 0.40081170201301575, 0.04202757030725479, 0.03717997670173645, 0.346903920173645, -0.1716335415840149, 0.4933912754058838, 0.08839595317840576, -0.006912631914019585, -0.11445599049329758, 0.2902635931968689, -0.8792495131492615, -0.07195653021335602, 0.9589137434959412, -1.0085153579711914, -0.5550829768180847, -0.9057133793830872, -0.4975070655345917, 0.07262948155403137, 0.5762943029403687, 0.3455353379249573, 0.5554754734039307, 0.01372632384300232, 0.4561314880847931, 0.8130227327346802, -0.12302296608686447, 0.6117770671844482, 0.27861514687538147, 0.08526373654603958, -0.6992258429527283, 0.8551510572433472, 0.09561450034379959, 0.342637836933136, 0.27263835072517395, 0.3969074785709381, -0.5243930220603943, -0.19555826485157013, -0.23881740868091583, 0.5036873817443848, -0.6420436501502991, -0.26306986808776855, -0.3690798878669739, -0.37754279375076294, -0.769294261932373, -0.6476959586143494, -0.30685025453567505, -0.5676662921905518, -0.5246009826660156, -0.5045294165611267, 0.5680936574935913, 0.48355361819267273, -0.3866502642631531, 0.04255060479044914, -0.4826948642730713, 0.27772650122642517, 0.3391708433628082, 0.5565651655197144, -0.39331871271133423, -0.5837216377258301, 0.03784878924489021, -0.1357383131980896, -0.5708635449409485, -0.9358536005020142, 0.3421764373779297, -0.04487014561891556, 0.528867781162262, 0.5712192058563232, 0.028020327910780907, 0.8382641077041626, -0.19235055148601532, 1.065277338027954, 0.29446014761924744, -0.7842710614204407, 0.7405758500099182, -0.34873467683792114, 0.20548704266548157, 0.664936900138855, 0.17366600036621094, -0.20387932658195496, -0.6688874959945679, -1.3470085859298706, -0.7891972064971924, 0.6922821998596191, 0.4345560669898987, -0.2591814696788788, 0.047157350927591324, 0.1605534553527832, -0.3211648464202881, -0.20330019295215607, -0.6430385708808899, -0.8997600674629211, -0.16467000544071198, -0.483020156621933, 0.12236197292804718, 0.059363123029470444, -0.3886431157588959, -0.8215966820716858, 1.008272409439087, -0.01035243272781372, 0.6113218069076538, 0.47227853536605835, 0.08926766365766525, 0.03872939199209213, 0.4616575241088867, 0.9367864727973938, 0.736018180847168, -0.4457254111766815, 0.41683220863342285, 0.3774593770503998, -1.0461077690124512, 0.49835044145584106, 0.346666157245636, -0.08369655907154083, -0.027876071631908417, 0.4553626775741577, 0.4134669005870819, 0.03248520568013191, -0.20590440928936005, 0.6501556038856506, -0.02777095139026642, -0.6048792600631714, -0.3916388750076294, 0.06669898331165314, -0.11098688840866089, -0.0494694747030735, 0.4102245271205902, -0.1380593478679657, 
-0.040351878851652145, -0.4693181812763214, 0.5135644674301147, 0.36857327818870544, -0.49888062477111816, -0.16607239842414856, 0.7286113500595093, -0.17000192403793335, -0.12910506129264832, 0.333025723695755, -0.19334910809993744, -0.6265458464622498, 1.176954746246338, 0.5752637982368469, 0.7236160635948181, -0.25364959239959717, -0.06828019767999649, 0.9359498023986816, 0.39395076036453247, -0.038436297327280045, 0.566413938999176, 0.3616909086704254, -0.2641034424304962, 0.1622263789176941, -0.8652341365814209, -0.03255276754498482, 0.19434386491775513, -0.8356484174728394, 0.33750292658805847, -0.5255657434463501, -0.14218199253082275, 0.02782260626554489, 0.4160619080066681, -0.43440312147140503, 0.5617489814758301, -0.39189085364341736, 1.2305285930633545, -1.0164986848831177, 0.715904951095581, 0.7740198969841003, -0.5233863592147827, -1.0857174396514893, -0.5205504894256592, 0.04902810975909233, -0.7832096815109253, 0.5692800879478455, -0.01972484588623047, 0.16835591197013855, -0.08464232087135315, -0.7179999947547913, -0.9339520931243896, 1.4213132858276367, -0.059043943881988525, -0.4151054322719574, 0.23149016499519348, -0.06899990886449814, 0.4465503394603729, 0.12580356001853943, 0.5788387060165405, 0.7432937026023865, 0.8100452423095703, -0.08562853187322617, -0.7477905750274658, 0.33625784516334534, -0.5463045835494995, -0.3266647160053253, 0.4909978210926056, -0.9475958347320557, 1.1672790050506592, 0.0335860513150692, 0.21463999152183533, -0.16273275017738342, 0.6506182551383972, 0.7756460905075073, 0.29544803500175476, 0.3726862072944641, 0.9693943858146667, 0.8259950280189514, -0.5057170987129211, 0.9893364906311035, -0.24165643751621246, 0.8741117715835571, 0.6808366775512695, 0.2466314136981964, 0.7756454944610596, 0.6782187223434448, -0.5550530552864075, 0.5754532814025879, 0.7731413245201111, -0.2996690571308136, 0.37750136852264404, 0.26317816972732544, -0.13492372632026672, -0.12826845049858093, 0.420806884765625, -0.8693099021911621, 0.1277584582567215, 0.07399322092533112, -0.33699217438697815, 0.07681699097156525, -0.4584618806838989, 0.3298700749874115, -0.0525306835770607, -0.02496502175927162, 0.4115937054157257, 0.035385701805353165, -0.452238529920578, 0.956247091293335, -0.13317988812923431, 0.7333266735076904, -0.5191771388053894, -0.10678009688854218, -0.3567957282066345, 0.5834445357322693, -0.43833523988723755, -1.0302846431732178, 0.18413522839546204, 0.06819398701190948, -0.10353688150644302, -0.1659720540046692, 0.7099071741104126, -0.21358487010002136, -0.7929418087005615, 0.13399359583854675, 0.05238877609372139, 0.10840631276369095, 0.5069049000740051, -0.6831809878349304, -0.33967941999435425, -0.0564892515540123, -0.5677923560142517, 0.12505663931369781, 0.2753635048866272, 0.315937340259552, 0.536496102809906, 0.6410205364227295, 0.15303194522857666, 0.40282267332077026, -0.5621947050094604, 0.8114272356033325, -1.0447803735733032, -0.7452861666679382, -0.905349850654602, 0.44909074902534485, -0.28656962513923645, -0.8832198977470398, 0.9846063256263733, 1.0814716815948486, 0.8787832856178284, 0.00006235744513105601, 0.6385673880577087, -0.4031828045845032, 0.2501576542854309, -0.407497763633728, 0.9251798391342163, -0.8462833166122437, -0.22379307448863983, -0.2518264949321747, -0.7060943841934204, -0.3749777674674988, 0.8333140015602112, -0.11955474317073822, 0.042273569852113724, 1.0894625186920166, 0.7006148099899292, -0.10309180617332458, 0.03420400246977806, -0.07411330938339233, 0.5521975159645081, 0.40741676092147827, 
0.9972373247146606, 0.6230058670043945, -0.8212667107582092, 0.3376060724258423, -0.5283507704734802, -0.4026528596878052, -0.39669114351272583, -0.4507061839103699, -0.8552825450897217, -0.4924840033054352, -0.22759512066841125, -0.6511672139167786, -0.13788339495658875, 1.010498285293579, 0.4930723011493683, -0.9180711507797241, -0.42602336406707764, -0.14423540234565735, 0.12456696480512619, -0.6093935966491699, -0.42261946201324463, 0.7039021253585815, -0.11680565774440765, -0.5300918817520142, 0.2261686474084854, -0.16076476871967316, 0.24026378989219666, 0.10510784387588501, -0.43821004033088684, -0.7505270838737488, 0.05974791944026947, 0.39440250396728516, 0.33662211894989014, -0.6959543824195862, -0.7071741223335266, 0.3142636716365814, -0.5189990997314453, 0.42108261585235596, -0.0135193457826972, -0.516430675983429, 0.05289193615317345, 0.7246039509773254, 0.492943674325943, 0.6718040704727173, -0.016143443062901497, 0.07463788986206055, -0.672546923160553, 0.16996262967586517, -0.014891617931425571, 0.28092291951179504, -0.04607250913977623, -0.3340711295604706, 0.7977890372276306, 0.6904696226119995, -0.5517665147781372, -1.0579698085784912, -0.4368898868560791, -1.457265853881836, -0.054242223501205444, 1.1470022201538086, 0.005989029072225094, -0.4651270806789398, 0.2563828229904175, -0.12153574824333191, 0.18324479460716248, -0.33083802461624146, 0.7324386239051819, 0.8159886598587036, -0.3553328216075897, 0.16324017941951752, -0.6438130736351013, 0.38480624556541443, 0.5440471172332764, -1.2352793216705322, -0.08400355279445648, 0.2658959627151489, 0.3154558837413788, 0.35664641857147217, 0.6259682774543762, -0.11422071605920792, 0.2827286422252655, 0.23874185979366302, 0.017625661566853523, -0.002315609250217676, 0.013097918592393398, -0.29697415232658386, 0.0689079612493515, -0.2430926114320755, -0.43717679381370544 ]
open-llm-leaderboard/details_TehVenom__oasst-sft-6-llama-33b-xor-MERGED-16bit
open-llm-leaderboard
2023-10-19T06:08:53Z
201
0
[ "region:us" ]
null
2023-08-27T11:53:29Z
--- pretty_name: Evaluation run of TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit](https://huggingface.co/TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TehVenom__oasst-sft-6-llama-33b-xor-MERGED-16bit\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T06:08:41.301951](https://huggingface.co/datasets/open-llm-leaderboard/details_TehVenom__oasst-sft-6-llama-33b-xor-MERGED-16bit/blob/main/results_2023-10-19T06-08-41.301951.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.30484479865771813,\n\ \ \"em_stderr\": 0.004714329508547516,\n \"f1\": 0.37780411073825637,\n\ \ \"f1_stderr\": 0.00458553371661785,\n \"acc\": 0.5478104301589363,\n\ \ \"acc_stderr\": 0.012054873797698495\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.30484479865771813,\n \"em_stderr\": 0.004714329508547516,\n\ \ \"f1\": 0.37780411073825637,\n \"f1_stderr\": 0.00458553371661785\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.30477634571645185,\n \ \ \"acc_stderr\": 0.012679297549515401\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7908445146014207,\n \"acc_stderr\": 0.011430450045881587\n\ \ }\n}\n```" repo_url: https://huggingface.co/TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|arc:challenge|25_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-24T10:25:39.689154.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T06_08_41.301951 path: - '**/details_harness|drop|3_2023-10-19T06-08-41.301951.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T06-08-41.301951.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T06_08_41.301951 path: - '**/details_harness|gsm8k|5_2023-10-19T06-08-41.301951.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T06-08-41.301951.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hellaswag|10_2023-08-24T10:25:39.689154.parquet' - split: latest path: - 
'**/details_harness|hellaswag|10_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-24T10:25:39.689154.parquet' - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-24T10:25:39.689154.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-24T10:25:39.689154.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-24T10:25:39.689154.parquet' - 
'**/details_harness|hendrycksTest-world_religions|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-24T10:25:39.689154.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-24T10:25:39.689154.parquet' - 
config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-management|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-24T10:25:39.689154.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-24T10:25:39.689154.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_24T10_25_39.689154 path: - '**/details_harness|truthfulqa:mc|0_2023-08-24T10:25:39.689154.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-24T10:25:39.689154.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T06_08_41.301951 path: - '**/details_harness|winogrande|5_2023-10-19T06-08-41.301951.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T06-08-41.301951.parquet' - config_name: results data_files: - split: 2023_10_19T06_08_41.301951 path: - results_2023-10-19T06-08-41.301951.parquet - split: latest path: - results_2023-10-19T06-08-41.301951.parquet
---

# Dataset Card for Evaluation run of TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit](https://huggingface.co/TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_TehVenom__oasst-sft-6-llama-33b-xor-MERGED-16bit",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-19T06:08:41.301951](https://huggingface.co/datasets/open-llm-leaderboard/details_TehVenom__oasst-sft-6-llama-33b-xor-MERGED-16bit/blob/main/results_2023-10-19T06-08-41.301951.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.30484479865771813,
        "em_stderr": 0.004714329508547516,
        "f1": 0.37780411073825637,
        "f1_stderr": 0.00458553371661785,
        "acc": 0.5478104301589363,
        "acc_stderr": 0.012054873797698495
    },
    "harness|drop|3": {
        "em": 0.30484479865771813,
        "em_stderr": 0.004714329508547516,
        "f1": 0.37780411073825637,
        "f1_stderr": 0.00458553371661785
    },
    "harness|gsm8k|5": {
        "acc": 0.30477634571645185,
        "acc_stderr": 0.012679297549515401
    },
    "harness|winogrande|5": {
        "acc": 0.7908445146014207,
        "acc_stderr": 0.011430450045881587
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
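Beyond the single `load_dataset` call shown in the Dataset Summary above, the per-task configs and the aggregated "results" config declared in this card's YAML header can be loaded the same way. The snippet below is a minimal sketch that assumes only those declared config and split names; the column layout inside the parquet files is not documented in this card.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_TehVenom__oasst-sft-6-llama-33b-xor-MERGED-16bit"

# Per-example details for one task, using the "latest" split declared in the YAML header.
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")

# Aggregated metrics for the most recent run (the "results" config points at results_*.parquet).
results = load_dataset(REPO, "results", split="latest")

print(winogrande)          # per-example evaluation rows (schema not documented here)
print(results[0].keys())   # inspect which aggregated fields are available
```

The timestamped splits listed in the header (for example `2023_10_19T06_08_41.301951`) can be passed instead of `latest` to pin a specific run.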
datasetId: open-llm-leaderboard/details_radm__Philosophy-Platypus2-13b
author: open-llm-leaderboard
last_modified: 2023-08-29T00:47:23Z
downloads: 201
likes: 0
tags: [ "region:us" ]
task_categories: null
createdAt: 2023-08-29T00:46:27Z
--- pretty_name: Evaluation run of radm/Philosophy-Platypus2-13b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [radm/Philosophy-Platypus2-13b](https://huggingface.co/radm/Philosophy-Platypus2-13b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_radm__Philosophy-Platypus2-13b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T00:45:24.163346](https://huggingface.co/datasets/open-llm-leaderboard/details_radm__Philosophy-Platypus2-13b/blob/main/results_2023-08-29T00%3A45%3A24.163346.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.5437981691869808,\n \"\ acc_stderr\": 0.03484311795554624,\n \"acc_norm\": 0.547878610439407,\n \ \ \"acc_norm_stderr\": 0.034826606717822575,\n \"mc1\": 0.24357405140758873,\n\ \ \"mc1_stderr\": 0.015026354824910782,\n \"mc2\": 0.37335488461829447,\n\ \ \"mc2_stderr\": 0.014112790281285795\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5477815699658704,\n \"acc_stderr\": 0.014544519880633822,\n\ \ \"acc_norm\": 0.5861774744027304,\n \"acc_norm_stderr\": 0.014392730009221004\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5828520215096594,\n\ \ \"acc_stderr\": 0.004920800313232742,\n \"acc_norm\": 0.785202150965943,\n\ \ \"acc_norm_stderr\": 0.0040984271589492634\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4222222222222222,\n\ \ \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.4222222222222222,\n\ \ \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.039889037033362836,\n\ \ \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.039889037033362836\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.49,\n\ \ \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \ \ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n\ \ \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6388888888888888,\n\ \ \"acc_stderr\": 0.04016660030451233,\n \"acc_norm\": 0.6388888888888888,\n\ \ \"acc_norm_stderr\": 0.04016660030451233\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n \ \ 
},\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n\ \ \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.47398843930635837,\n\ \ \"acc_stderr\": 0.03807301726504511,\n \"acc_norm\": 0.47398843930635837,\n\ \ \"acc_norm_stderr\": 0.03807301726504511\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105653,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105653\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n\ \ \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4340425531914894,\n \"acc_stderr\": 0.03240038086792747,\n\ \ \"acc_norm\": 0.4340425531914894,\n \"acc_norm_stderr\": 0.03240038086792747\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n\ \ \"acc_stderr\": 0.042663394431593935,\n \"acc_norm\": 0.2894736842105263,\n\ \ \"acc_norm_stderr\": 0.042663394431593935\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.04154659671707548,\n\ \ \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.04154659671707548\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.37566137566137564,\n \"acc_stderr\": 0.024942368931159795,\n \"\ acc_norm\": 0.37566137566137564,\n \"acc_norm_stderr\": 0.024942368931159795\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n\ \ \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n\ \ \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6645161290322581,\n\ \ \"acc_stderr\": 0.026860206444724352,\n \"acc_norm\": 0.6645161290322581,\n\ \ \"acc_norm_stderr\": 0.026860206444724352\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.03465304488406796,\n\ \ \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.03465304488406796\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\"\ : 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.03646204963253812,\n\ \ \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.03646204963253812\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7323232323232324,\n \"acc_stderr\": 0.03154449888270286,\n \"\ acc_norm\": 0.7323232323232324,\n \"acc_norm_stderr\": 0.03154449888270286\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7979274611398963,\n \"acc_stderr\": 0.02897908979429673,\n\ \ \"acc_norm\": 0.7979274611398963,\n \"acc_norm_stderr\": 0.02897908979429673\n\ \ },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6,\n \"acc_stderr\": 0.02483881198803316,\n \"acc_norm\"\ : 0.6,\n \"acc_norm_stderr\": 0.02483881198803316\n },\n \"harness|hendrycksTest-high_school_mathematics|5\"\ : {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.028972648884844267,\n\ \ \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.028972648884844267\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5630252100840336,\n \"acc_stderr\": 0.032219436365661956,\n\ \ \"acc_norm\": 0.5630252100840336,\n \"acc_norm_stderr\": 0.032219436365661956\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.32450331125827814,\n \"acc_stderr\": 0.038227469376587525,\n \"\ acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.038227469376587525\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7688073394495413,\n \"acc_stderr\": 0.018075750241633146,\n \"\ acc_norm\": 0.7688073394495413,\n \"acc_norm_stderr\": 0.018075750241633146\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4351851851851852,\n \"acc_stderr\": 0.03381200005643525,\n \"\ acc_norm\": 0.4351851851851852,\n \"acc_norm_stderr\": 0.03381200005643525\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.75,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.75,\n\ \ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.7215189873417721,\n \"acc_stderr\": 0.02917868230484253,\n\ \ \"acc_norm\": 0.7215189873417721,\n \"acc_norm_stderr\": 0.02917868230484253\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6053811659192825,\n\ \ \"acc_stderr\": 0.03280400504755291,\n \"acc_norm\": 0.6053811659192825,\n\ \ \"acc_norm_stderr\": 0.03280400504755291\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5954198473282443,\n \"acc_stderr\": 0.043046937953806645,\n\ \ \"acc_norm\": 0.5954198473282443,\n \"acc_norm_stderr\": 0.043046937953806645\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6611570247933884,\n \"acc_stderr\": 0.04320767807536671,\n \"\ acc_norm\": 0.6611570247933884,\n \"acc_norm_stderr\": 0.04320767807536671\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6388888888888888,\n\ \ \"acc_stderr\": 0.04643454608906275,\n \"acc_norm\": 0.6388888888888888,\n\ \ \"acc_norm_stderr\": 0.04643454608906275\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5950920245398773,\n \"acc_stderr\": 0.03856672163548913,\n\ \ \"acc_norm\": 0.5950920245398773,\n \"acc_norm_stderr\": 0.03856672163548913\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n\ \ \"acc_stderr\": 0.04432804055291518,\n \"acc_norm\": 0.32142857142857145,\n\ \ \"acc_norm_stderr\": 0.04432804055291518\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6990291262135923,\n \"acc_stderr\": 0.04541609446503947,\n\ \ \"acc_norm\": 0.6990291262135923,\n \"acc_norm_stderr\": 0.04541609446503947\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7008547008547008,\n\ \ \"acc_stderr\": 0.02999695185834949,\n \"acc_norm\": 0.7008547008547008,\n\ \ \"acc_norm_stderr\": 0.02999695185834949\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \ \ \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n \ \ 
},\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7420178799489144,\n\ \ \"acc_stderr\": 0.01564583018834895,\n \"acc_norm\": 0.7420178799489144,\n\ \ \"acc_norm_stderr\": 0.01564583018834895\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5982658959537572,\n \"acc_stderr\": 0.026394104177643637,\n\ \ \"acc_norm\": 0.5982658959537572,\n \"acc_norm_stderr\": 0.026394104177643637\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3463687150837989,\n\ \ \"acc_stderr\": 0.015913546784020117,\n \"acc_norm\": 0.3463687150837989,\n\ \ \"acc_norm_stderr\": 0.015913546784020117\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5816993464052288,\n \"acc_stderr\": 0.028245134024387303,\n\ \ \"acc_norm\": 0.5816993464052288,\n \"acc_norm_stderr\": 0.028245134024387303\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6366559485530546,\n\ \ \"acc_stderr\": 0.027316847674192707,\n \"acc_norm\": 0.6366559485530546,\n\ \ \"acc_norm_stderr\": 0.027316847674192707\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.02672586880910079,\n\ \ \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.02672586880910079\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.41843971631205673,\n \"acc_stderr\": 0.029427994039419994,\n \ \ \"acc_norm\": 0.41843971631205673,\n \"acc_norm_stderr\": 0.029427994039419994\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.40352020860495436,\n\ \ \"acc_stderr\": 0.012530241301193186,\n \"acc_norm\": 0.40352020860495436,\n\ \ \"acc_norm_stderr\": 0.012530241301193186\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5441176470588235,\n \"acc_stderr\": 0.030254372573976715,\n\ \ \"acc_norm\": 0.5441176470588235,\n \"acc_norm_stderr\": 0.030254372573976715\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5343137254901961,\n \"acc_stderr\": 0.020180144843307293,\n \ \ \"acc_norm\": 0.5343137254901961,\n \"acc_norm_stderr\": 0.020180144843307293\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5909090909090909,\n\ \ \"acc_stderr\": 0.04709306978661895,\n \"acc_norm\": 0.5909090909090909,\n\ \ \"acc_norm_stderr\": 0.04709306978661895\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5959183673469388,\n \"acc_stderr\": 0.03141470802586589,\n\ \ \"acc_norm\": 0.5959183673469388,\n \"acc_norm_stderr\": 0.03141470802586589\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7164179104477612,\n\ \ \"acc_stderr\": 0.031871875379197966,\n \"acc_norm\": 0.7164179104477612,\n\ \ \"acc_norm_stderr\": 0.031871875379197966\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.78,\n \"acc_stderr\": 0.041633319989322626,\n \ \ \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.041633319989322626\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n\ \ \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.463855421686747,\n\ \ \"acc_norm_stderr\": 0.03882310850890594\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7602339181286549,\n \"acc_stderr\": 0.03274485211946956,\n\ \ \"acc_norm\": 0.7602339181286549,\n \"acc_norm_stderr\": 0.03274485211946956\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24357405140758873,\n\ \ \"mc1_stderr\": 0.015026354824910782,\n \"mc2\": 0.37335488461829447,\n\ \ \"mc2_stderr\": 0.014112790281285795\n }\n}\n```" 
repo_url: https://huggingface.co/radm/Philosophy-Platypus2-13b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|arc:challenge|25_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hellaswag|10_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T00:45:24.163346.parquet' - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T00:45:24.163346.parquet' - 
'**/details_harness|hendrycksTest-computer_security|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T00:45:24.163346.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T00:45:24.163346.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T00:45:24.163346.parquet' - config_name: 
harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - 
split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T00:45:24.163346.parquet' - split: 
latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T00:45:24.163346.parquet' - config_name: 
harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T00:45:24.163346.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T00_45_24.163346 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T00:45:24.163346.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T00:45:24.163346.parquet' - config_name: results data_files: - split: 2023_08_29T00_45_24.163346 path: - results_2023-08-29T00:45:24.163346.parquet - split: latest path: - results_2023-08-29T00:45:24.163346.parquet --- # Dataset Card for Evaluation run of radm/Philosophy-Platypus2-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/radm/Philosophy-Platypus2-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [radm/Philosophy-Platypus2-13b](https://huggingface.co/radm/Philosophy-Platypus2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
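For instance, that aggregated configuration can be pulled on its own; a minimal sketch that relies only on the `results` config and the `latest` split declared in the YAML above:

```python
from datasets import load_dataset

# Aggregated metrics of the run: "results" config, "latest" split,
# both declared in the configs section of this card.
results = load_dataset(
    "open-llm-leaderboard/details_radm__Philosophy-Platypus2-13b",
    "results",
    split="latest",
)
print(results[0])
```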
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_radm__Philosophy-Platypus2-13b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T00:45:24.163346](https://huggingface.co/datasets/open-llm-leaderboard/details_radm__Philosophy-Platypus2-13b/blob/main/results_2023-08-29T00%3A45%3A24.163346.json): ```python { "all": { "acc": 0.5437981691869808, "acc_stderr": 0.03484311795554624, "acc_norm": 0.547878610439407, "acc_norm_stderr": 0.034826606717822575, "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": 0.37335488461829447, "mc2_stderr": 0.014112790281285795 }, "harness|arc:challenge|25": { "acc": 0.5477815699658704, "acc_stderr": 0.014544519880633822, "acc_norm": 0.5861774744027304, "acc_norm_stderr": 0.014392730009221004 }, "harness|hellaswag|10": { "acc": 0.5828520215096594, "acc_stderr": 0.004920800313232742, "acc_norm": 0.785202150965943, "acc_norm_stderr": 0.0040984271589492634 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4222222222222222, "acc_stderr": 0.04266763404099582, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.039889037033362836, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.030242233800854494, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.030242233800854494 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04016660030451233, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.04016660030451233 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.47398843930635837, "acc_stderr": 0.03807301726504511, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.03807301726504511 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105653, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105653 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4340425531914894, "acc_stderr": 0.03240038086792747, "acc_norm": 0.4340425531914894, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.042663394431593935, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.042663394431593935 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 
0.46206896551724136, "acc_stderr": 0.04154659671707548, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.37566137566137564, "acc_stderr": 0.024942368931159795, "acc_norm": 0.37566137566137564, "acc_norm_stderr": 0.024942368931159795 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6645161290322581, "acc_stderr": 0.026860206444724352, "acc_norm": 0.6645161290322581, "acc_norm_stderr": 0.026860206444724352 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406796, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406796 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.03646204963253812, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.03646204963253812 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7323232323232324, "acc_stderr": 0.03154449888270286, "acc_norm": 0.7323232323232324, "acc_norm_stderr": 0.03154449888270286 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7979274611398963, "acc_stderr": 0.02897908979429673, "acc_norm": 0.7979274611398963, "acc_norm_stderr": 0.02897908979429673 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6, "acc_stderr": 0.02483881198803316, "acc_norm": 0.6, "acc_norm_stderr": 0.02483881198803316 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5630252100840336, "acc_stderr": 0.032219436365661956, "acc_norm": 0.5630252100840336, "acc_norm_stderr": 0.032219436365661956 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7688073394495413, "acc_stderr": 0.018075750241633146, "acc_norm": 0.7688073394495413, "acc_norm_stderr": 0.018075750241633146 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4351851851851852, "acc_stderr": 0.03381200005643525, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.03381200005643525 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.75, "acc_stderr": 0.03039153369274154, "acc_norm": 0.75, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7215189873417721, "acc_stderr": 0.02917868230484253, "acc_norm": 0.7215189873417721, "acc_norm_stderr": 0.02917868230484253 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6053811659192825, "acc_stderr": 0.03280400504755291, "acc_norm": 0.6053811659192825, "acc_norm_stderr": 0.03280400504755291 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5954198473282443, "acc_stderr": 0.043046937953806645, "acc_norm": 
0.5954198473282443, "acc_norm_stderr": 0.043046937953806645 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6611570247933884, "acc_stderr": 0.04320767807536671, "acc_norm": 0.6611570247933884, "acc_norm_stderr": 0.04320767807536671 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04643454608906275, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.04643454608906275 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5950920245398773, "acc_stderr": 0.03856672163548913, "acc_norm": 0.5950920245398773, "acc_norm_stderr": 0.03856672163548913 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291518, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291518 }, "harness|hendrycksTest-management|5": { "acc": 0.6990291262135923, "acc_stderr": 0.04541609446503947, "acc_norm": 0.6990291262135923, "acc_norm_stderr": 0.04541609446503947 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7008547008547008, "acc_stderr": 0.02999695185834949, "acc_norm": 0.7008547008547008, "acc_norm_stderr": 0.02999695185834949 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7420178799489144, "acc_stderr": 0.01564583018834895, "acc_norm": 0.7420178799489144, "acc_norm_stderr": 0.01564583018834895 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5982658959537572, "acc_stderr": 0.026394104177643637, "acc_norm": 0.5982658959537572, "acc_norm_stderr": 0.026394104177643637 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3463687150837989, "acc_stderr": 0.015913546784020117, "acc_norm": 0.3463687150837989, "acc_norm_stderr": 0.015913546784020117 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5816993464052288, "acc_stderr": 0.028245134024387303, "acc_norm": 0.5816993464052288, "acc_norm_stderr": 0.028245134024387303 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6366559485530546, "acc_stderr": 0.027316847674192707, "acc_norm": 0.6366559485530546, "acc_norm_stderr": 0.027316847674192707 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6388888888888888, "acc_stderr": 0.02672586880910079, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.02672586880910079 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.41843971631205673, "acc_stderr": 0.029427994039419994, "acc_norm": 0.41843971631205673, "acc_norm_stderr": 0.029427994039419994 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.40352020860495436, "acc_stderr": 0.012530241301193186, "acc_norm": 0.40352020860495436, "acc_norm_stderr": 0.012530241301193186 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5441176470588235, "acc_stderr": 0.030254372573976715, "acc_norm": 0.5441176470588235, "acc_norm_stderr": 0.030254372573976715 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5343137254901961, "acc_stderr": 0.020180144843307293, "acc_norm": 0.5343137254901961, "acc_norm_stderr": 0.020180144843307293 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.04709306978661895, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.04709306978661895 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5959183673469388, "acc_stderr": 0.03141470802586589, "acc_norm": 0.5959183673469388, "acc_norm_stderr": 0.03141470802586589 }, "harness|hendrycksTest-sociology|5": { "acc": 
0.7164179104477612, "acc_stderr": 0.031871875379197966, "acc_norm": 0.7164179104477612, "acc_norm_stderr": 0.031871875379197966 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.041633319989322626, "acc_norm": 0.78, "acc_norm_stderr": 0.041633319989322626 }, "harness|hendrycksTest-virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890594, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890594 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.03274485211946956, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.03274485211946956 }, "harness|truthfulqa:mc|0": { "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": 0.37335488461829447, "mc2_stderr": 0.014112790281285795 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7663077116012573, -0.8684896230697632, 0.2758052945137024, 0.20688892900943756, -0.17043960094451904, -0.05236559733748436, 0.01962321251630783, -0.23462408781051636, 0.5578598976135254, -0.10209537297487259, -0.494414746761322, -0.6998400688171387, -0.456574946641922, 0.22009910643100739, 0.05194791406393051, 0.8171401023864746, -0.15635110437870026, -0.15195611119270325, 0.054057732224464417, -0.0398779921233654, -0.26610293984413147, -0.32819971442222595, -0.4976605772972107, -0.41244807839393616, 0.12844733893871307, 0.4154105484485626, 0.5110850930213928, 0.8548913598060608, 0.7000398635864258, 0.27916210889816284, -0.33347970247268677, -0.05024784803390503, -0.15190254151821136, -0.27096447348594666, 0.3478555381298065, -0.3148389756679535, -0.8778514266014099, 0.3463568389415741, 0.7834120392799377, 0.698255181312561, -0.09254081547260284, 0.2990206778049469, 0.0355810783803463, 0.5632247924804688, -0.31455937027931213, 0.05930652469396591, -0.2660320997238159, 0.28360965847969055, -0.195875346660614, -0.3222545385360718, -0.31141650676727295, -0.2720358073711395, -0.0732697919011116, -0.9479259252548218, 0.27090269327163696, 0.33309510350227356, 1.5768966674804688, -0.19350749254226685, -0.22234655916690826, 0.06148238107562065, -0.086822509765625, 1.0556164979934692, -0.814048707485199, 0.3500414788722992, 0.8172008395195007, 0.12225160002708435, -0.13874581456184387, -0.5933814644813538, -0.6313949823379517, 0.16943980753421783, -0.39263615012168884, 0.36812689900398254, -0.03943408280611038, -0.18138334155082703, 0.3801940977573395, 0.6793586611747742, -0.6595548391342163, 0.13794474303722382, -0.6435608267784119, -0.16636984050273895, 1.0366871356964111, 0.3859645128250122, 0.06519246101379395, -0.3442959487438202, -0.69697505235672, -0.7115688323974609, -0.37381014227867126, 0.25536906719207764, 0.43704819679260254, 0.3402262330055237, -0.42000913619995117, 0.6490519642829895, -0.37906208634376526, 0.5882183313369751, 0.38260143995285034, 0.017691800370812416, 0.8997381329536438, -0.6686913371086121, -0.548504650592804, -0.07357525080442429, 1.132995843887329, 0.6275014877319336, 0.04309588670730591, 0.23149016499519348, 0.06630964577198029, -0.0553457997739315, -0.042267341166734695, -0.8878583908081055, -0.3435903787612915, 0.1592799425125122, -0.42718595266342163, -0.47752997279167175, 0.3512156009674072, -0.9403343200683594, 0.1429460197687149, -0.004975171759724617, 0.42811742424964905, -0.5235189199447632, -0.15676288306713104, 0.37537720799446106, -0.37721750140190125, 0.8750476837158203, -0.16600902378559113, -0.8277734518051147, 0.41581329703330994, 0.5333966016769409, 0.7832996249198914, -0.08729445934295654, -0.3975108563899994, -0.04493362084031105, -0.07049211859703064, -0.24182263016700745, 0.5739299654960632, -0.3397115170955658, -0.47634157538414, -0.3068106770515442, 0.25888878107070923, -0.26235806941986084, -0.3450815975666046, 0.7735603451728821, -0.21694445610046387, 0.21974150836467743, -0.4369465708732605, -0.6720659732818604, 0.16173917055130005, 0.34980008006095886, -0.387382835149765, 1.294068455696106, 0.1851787120103836, -0.8676266074180603, 0.4525502026081085, -0.6259644627571106, -0.19441726803779602, 0.016257332637906075, -0.08981624245643616, -0.8104015588760376, -0.30522486567497253, 0.188014417886734, 0.4037730395793915, -0.16532960534095764, -0.1578303873538971, -0.3938056230545044, -0.31141573190689087, 0.37525078654289246, -0.1511843502521515, 1.2517955303192139, -0.014458288438618183, -0.7117044925689697, -0.18044635653495789, 
-1.254034161567688, 0.35352519154548645, 0.22058336436748505, -0.38361018896102905, -0.20886622369289398, -0.47949159145355225, -0.033656761050224304, 0.17310728132724762, 0.27946504950523376, -0.832515299320221, 0.2630138397216797, -0.37748533487319946, 0.16683252155780792, 1.2747265100479126, 0.033667873591184616, 0.1976558119058609, -0.551410436630249, 0.5184348225593567, 0.19371071457862854, 0.19756895303726196, 0.39403167366981506, -0.6389456987380981, -0.7659622430801392, -0.48251089453697205, -0.08227284252643585, 0.6134918332099915, -0.11604613810777664, 1.175890564918518, 0.08933326601982117, -0.8854364156723022, -0.4613736867904663, -0.1446067839860916, 0.4438096582889557, 0.8202329277992249, 0.5935971140861511, -0.051832377910614014, -0.6257222890853882, -1.0584443807601929, -0.3170241415500641, -0.14227958023548126, 0.1589166522026062, 0.23887893557548523, 1.00349760055542, -0.22812135517597198, 0.6459310054779053, -1.058617353439331, -0.21622753143310547, 0.1926979422569275, -0.05735023319721222, 0.7936309576034546, 0.7985337376594543, 0.5746073722839355, -0.6475179195404053, -0.5075617432594299, 0.18694095313549042, -0.9636315107345581, -0.05464375764131546, 0.1433250457048416, -0.33085212111473083, 0.025879044085741043, 0.09364870190620422, -0.7231210470199585, 0.5475943684577942, 0.19755445420742035, -1.1411769390106201, 0.9965997338294983, -0.3895343840122223, 0.5339599847793579, -1.0106096267700195, 0.18782931566238403, -0.05885588377714157, 0.06058554723858833, -0.4809984564781189, 0.017399199306964874, 0.0757407695055008, 0.4161028265953064, -0.4393419325351715, 0.7987197637557983, -0.7058730125427246, -0.0904933288693428, 0.4511149823665619, 0.1564318686723709, -0.10355617105960846, 0.28939852118492126, -0.1425405889749527, 0.7670103907585144, 0.8177386522293091, -0.5057379603385925, 0.5458135604858398, 0.47357887029647827, -0.2574878931045532, 0.793744683265686, -0.4809279143810272, -0.2821408212184906, 0.2795369625091553, -0.03924821689724922, -0.8331067562103271, -0.4704868793487549, 0.04712286964058876, -0.6371383666992188, -0.12927329540252686, 0.40730762481689453, -0.2719971537590027, -0.7976519465446472, -0.9284395575523376, 0.3777357339859009, 0.7233732342720032, -0.4120125472545624, -0.1700708568096161, 0.048313215374946594, 0.12079105526208878, -0.8348485827445984, -0.85996413230896, -0.4783124327659607, -0.20545120537281036, -0.7574724555015564, 0.28352364897727966, -0.29305243492126465, -0.27545100450515747, -0.08815748244524002, -0.24320237338542938, -0.26499617099761963, -0.004531422164291143, 0.13504593074321747, 0.647579550743103, -0.44974496960639954, -0.31587129831314087, -0.26682421565055847, -0.22341059148311615, 0.26394376158714294, -0.1417391002178192, 0.3890492618083954, -0.4620596468448639, -0.3737086057662964, -0.5208920240402222, -0.025066107511520386, 0.7029217481613159, -0.10392345488071442, 0.7524254322052002, 0.4267030656337738, -0.3111059069633484, -0.027557160705327988, -0.25348347425460815, -0.3460988402366638, -0.5947875380516052, 0.2521989345550537, -0.5068396925926208, -1.0647270679473877, 0.7980663776397705, 0.5141926407814026, 0.07247186452150345, 1.113122582435608, 0.5884268283843994, -0.29439306259155273, 1.0325993299484253, 0.020545348525047302, 0.41632455587387085, 0.33804887533187866, -0.7022297978401184, 0.11355865001678467, -0.9190287590026855, -0.3707525432109833, -0.5730043053627014, -0.5044218301773071, -0.6720807552337646, -0.07989715784788132, 0.2468816488981247, 0.1501559466123581, -0.6688812375068665, 
0.5300129652023315, -0.878153383731842, 0.6117487549781799, 0.5265469551086426, 0.25118011236190796, 0.17012479901313782, -0.10430239886045456, -0.4183770418167114, -0.20964393019676208, -0.4562607407569885, -0.2816097140312195, 1.246195673942566, 0.324287474155426, 0.8047211170196533, 0.10659914463758469, 0.8376748561859131, 0.05915352329611778, -0.09757300466299057, -0.5843833684921265, 0.6652520298957825, 0.156566321849823, -0.8254665732383728, -0.3661542534828186, -0.507577657699585, -1.0434629917144775, 0.41338711977005005, -0.1959516406059265, -0.8223626613616943, 0.14118310809135437, 0.028664156794548035, -0.2327321320772171, 0.5391110181808472, -0.522672176361084, 0.8384222984313965, -0.09264346957206726, -0.4917488992214203, 0.09989602863788605, -0.8502315282821655, 0.43595364689826965, 0.23207423090934753, 0.27575379610061646, 0.05777831748127937, 0.26559507846832275, 1.2211607694625854, -0.8207150101661682, 0.4267061948776245, 0.055232614278793335, 0.017415726557374, 0.31230661273002625, -0.11444483697414398, 0.4875923991203308, 0.1680251955986023, -0.00814390555024147, -0.15079852938652039, 0.3051740527153015, -0.8820342421531677, -0.05916738137602806, 0.8907157778739929, -1.024489402770996, -0.5818209648132324, -0.9820177555084229, -0.543207049369812, 0.052933771163225174, 0.5511060357093811, 0.37547579407691956, 0.5471000075340271, 0.03167974576354027, 0.5087931156158447, 0.9829497337341309, -0.18853916227817535, 0.5833107233047485, 0.24836967885494232, 0.1172790601849556, -0.7121299505233765, 0.8375319838523865, 0.03174803406000137, 0.376477986574173, 0.2971087694168091, 0.4213109016418457, -0.5802847743034363, -0.2132820039987564, -0.1790984869003296, 0.5448029637336731, -0.6642496585845947, -0.27914148569107056, -0.41774070262908936, -0.38428372144699097, -0.7867244482040405, -0.6633047461509705, -0.306192547082901, -0.498028963804245, -0.5190900564193726, -0.5192965865135193, 0.6321092247962952, 0.45874953269958496, -0.42524194717407227, 0.06766129285097122, -0.43807682394981384, 0.23718102276325226, 0.3466532528400421, 0.5909480452537537, -0.38838329911231995, -0.5841213464736938, 0.10437881201505661, -0.15137946605682373, -0.5468114614486694, -0.8925567269325256, 0.2474079132080078, -0.02077341265976429, 0.4621698260307312, 0.6483335494995117, 0.05779630318284035, 0.8366296887397766, -0.19117048382759094, 0.9932637214660645, 0.3702256381511688, -0.7600322961807251, 0.7618743181228638, -0.3347141742706299, 0.16177000105381012, 0.6457621455192566, 0.1292169690132141, -0.21472394466400146, -0.6633707284927368, -1.2803196907043457, -0.7654925584793091, 0.7117996215820312, 0.3733297884464264, -0.21277649700641632, 0.019235186278820038, 0.18871714174747467, -0.286565899848938, -0.2341686636209488, -0.71669602394104, -0.9405977129936218, -0.19628487527370453, -0.5203850865364075, 0.15822291374206543, 0.0204661563038826, -0.40531641244888306, -0.8397729992866516, 0.885459303855896, 0.06613501906394958, 0.5884129405021667, 0.4646238386631012, 0.09299325197935104, 0.06131836771965027, 0.4785778820514679, 0.9803885817527771, 0.7808393239974976, -0.5195302367210388, 0.446500688791275, 0.3636697828769684, -1.1402976512908936, 0.49164295196533203, 0.29452770948410034, -0.07204683125019073, -0.02468939870595932, 0.4082137644290924, 0.40074047446250916, -0.008818067610263824, -0.17535804212093353, 0.628939151763916, -0.05427075922489166, -0.5861689448356628, -0.40533876419067383, 0.04222508892416954, -0.12391285598278046, -0.05465174838900566, 0.4036445915699005, -0.14374929666519165, 
-0.10824620723724365, -0.4769574701786041, 0.4900270104408264, 0.37755608558654785, -0.44480764865875244, -0.18958541750907898, 0.7937157154083252, -0.19450095295906067, -0.11783614754676819, 0.2723983824253082, -0.1741180419921875, -0.5689175724983215, 1.065455675125122, 0.683870792388916, 0.6172454357147217, -0.301087886095047, -0.07007447630167007, 0.9770548939704895, 0.3904661238193512, -0.043673571199178696, 0.5265704393386841, 0.3046737611293793, -0.2852118909358978, 0.2095726877450943, -0.9321044087409973, -0.017176659777760506, 0.1413862556219101, -0.8355751037597656, 0.29951775074005127, -0.6667850613594055, -0.2244638055562973, 0.0233326256275177, 0.4416145980358124, -0.4272974729537964, 0.5790902376174927, -0.43827664852142334, 1.2370935678482056, -0.9805606603622437, 0.7315126657485962, 0.7925531268119812, -0.5238982439041138, -1.0115342140197754, -0.5987985730171204, -0.007540010381489992, -0.8179930448532104, 0.6190498471260071, -0.09797906875610352, 0.1384824514389038, -0.06025907024741173, -0.7109288573265076, -0.8788440227508545, 1.403267502784729, -0.07888403534889221, -0.4035051763057709, 0.2545192837715149, -0.0030904377344995737, 0.42786088585853577, 0.1407037228345871, 0.5883590579032898, 0.7297286987304688, 0.8189759850502014, -0.04125899821519852, -0.6901441216468811, 0.3535723090171814, -0.46069303154945374, -0.34167078137397766, 0.46467408537864685, -0.9136623740196228, 1.2212623357772827, 0.044243816286325455, 0.1756814867258072, -0.18928849697113037, 0.6886798739433289, 0.7862494587898254, 0.2788030207157135, 0.32802632451057434, 0.9716117978096008, 0.8589521050453186, -0.5171706080436707, 0.9571999311447144, -0.22091609239578247, 0.8923044800758362, 0.6584811806678772, 0.19503679871559143, 0.7515008449554443, 0.6506022214889526, -0.545985996723175, 0.5186972618103027, 0.7946923971176147, -0.29686999320983887, 0.3955404758453369, 0.2209637314081192, -0.1776800900697708, -0.1621863692998886, 0.46561744809150696, -0.9233986139297485, 0.1103157326579094, 0.14932671189308167, -0.3393596112728119, 0.05936216935515404, -0.43477892875671387, 0.358159601688385, -0.01140647754073143, -0.0194010678678751, 0.3580845296382904, 0.047192733734846115, -0.3676791489124298, 0.924921452999115, -0.11874433606863022, 0.8129897713661194, -0.5206732749938965, -0.06575693190097809, -0.3967495262622833, 0.6520687341690063, -0.442817360162735, -1.094525933265686, 0.14489005506038666, 0.047726549208164215, -0.11953262239694595, -0.21220070123672485, 0.6794880032539368, -0.18828216195106506, -0.7623485326766968, 0.10367615520954132, 0.020029371604323387, 0.07289064675569534, 0.6247071623802185, -0.7173317670822144, -0.318778932094574, -0.08531300723552704, -0.48846426606178284, 0.11375608295202255, 0.31918999552726746, 0.3408891558647156, 0.5383332967758179, 0.6300496459007263, 0.2040695995092392, 0.41719284653663635, -0.5964494943618774, 0.7421593070030212, -1.0751628875732422, -0.7468855381011963, -0.9445828199386597, 0.3425842821598053, -0.33375081419944763, -0.8610964417457581, 0.997194766998291, 1.0653706789016724, 0.8983374834060669, 0.0303681418299675, 0.6416553258895874, -0.3992336392402649, 0.2910402715206146, -0.37901490926742554, 1.0004574060440063, -0.8115329742431641, -0.23750025033950806, -0.2797524929046631, -0.729292094707489, -0.4416385889053345, 0.9385543465614319, -0.19655025005340576, 0.043359968811273575, 1.088161826133728, 0.6686516404151917, -0.1643858253955841, 0.07614171504974365, -0.0831182524561882, 0.5511611104011536, 0.39977774024009705, 1.0280331373214722, 
0.6517310738563538, -0.7860978841781616, 0.3462943136692047, -0.47281113266944885, -0.43838635087013245, -0.3792566955089569, -0.46373263001441956, -0.8915795087814331, -0.47557705640792847, -0.23065242171287537, -0.6660752892494202, -0.15509064495563507, 1.013350486755371, 0.44202762842178345, -0.9061272740364075, -0.4626859724521637, -0.08559221774339676, 0.10683313757181168, -0.5633305311203003, -0.42037904262542725, 0.7801237106323242, -0.07780814170837402, -0.4951719641685486, 0.1360173374414444, -0.10034338384866714, 0.2577418386936188, 0.13462331891059875, -0.42466217279434204, -0.7353503704071045, 0.018579134717583656, 0.433646023273468, 0.3831648826599121, -0.6850759983062744, -0.7371614575386047, 0.28040075302124023, -0.5201632380485535, 0.46191561222076416, -0.0006274516345001757, -0.5190337300300598, 0.09928914904594421, 0.6821611523628235, 0.5076315999031067, 0.6440172791481018, 0.000255993363680318, 0.04593278095126152, -0.6544288992881775, 0.2648663818836212, 0.010729905217885971, 0.230652317404747, 0.0013178345980122685, -0.25183340907096863, 0.7187381386756897, 0.7469947338104248, -0.5152639150619507, -1.097725510597229, -0.4260654151439667, -1.449454665184021, -0.003918392118066549, 1.0944695472717285, 0.030705448240041733, -0.5100489854812622, 0.21363578736782074, -0.11229291558265686, 0.14475993812084198, -0.30791598558425903, 0.7377904653549194, 0.7837855815887451, -0.35513490438461304, 0.1205577626824379, -0.6720166206359863, 0.43548107147216797, 0.5413292050361633, -1.1532386541366577, -0.059571441262960434, 0.2120843529701233, 0.36155596375465393, 0.34138554334640503, 0.6443056464195251, -0.1504923701286316, 0.2566603720188141, 0.22432763874530792, 0.03867010027170181, 0.04433722048997879, 0.1147654727101326, -0.2292443960905075, 0.07442370802164078, -0.2355087697505951, -0.4516304135322571 ]
open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v5
open-llm-leaderboard
2023-08-29T09:19:12Z
201
0
[ "region:us" ]
null
2023-08-29T09:18:15Z
--- pretty_name: Evaluation run of yeontaek/llama-2-13B-ensemble-v5 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yeontaek/llama-2-13B-ensemble-v5](https://huggingface.co/yeontaek/llama-2-13B-ensemble-v5)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v5\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T09:17:14.183323](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v5/blob/main/results_2023-08-29T09%3A17%3A14.183323.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.5953117661059801,\n \"\ acc_stderr\": 0.03391896483304526,\n \"acc_norm\": 0.5994365516843435,\n\ \ \"acc_norm_stderr\": 0.033896234769528244,\n \"mc1\": 0.3843329253365973,\n\ \ \"mc1_stderr\": 0.017028707301245203,\n \"mc2\": 0.5327328500103707,\n\ \ \"mc2_stderr\": 0.015551697577870274\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5844709897610921,\n \"acc_stderr\": 0.014401366641216388,\n\ \ \"acc_norm\": 0.6262798634812287,\n \"acc_norm_stderr\": 0.014137708601759084\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6290579565823541,\n\ \ \"acc_stderr\": 0.004820697457420419,\n \"acc_norm\": 0.8306114319856602,\n\ \ \"acc_norm_stderr\": 0.0037432817493736324\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5185185185185185,\n\ \ \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.5185185185185185,\n\ \ \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.631578947368421,\n \"acc_stderr\": 0.03925523381052932,\n\ \ \"acc_norm\": 0.631578947368421,\n \"acc_norm_stderr\": 0.03925523381052932\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\ \ \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \ \ \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6264150943396226,\n \"acc_stderr\": 0.029773082713319875,\n\ \ \"acc_norm\": 0.6264150943396226,\n \"acc_norm_stderr\": 0.029773082713319875\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6666666666666666,\n\ \ \"acc_stderr\": 0.03942082639927213,\n \"acc_norm\": 0.6666666666666666,\n\ \ \"acc_norm_stderr\": 0.03942082639927213\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \ \ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 
0.04960449637488584\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n\ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5722543352601156,\n\ \ \"acc_stderr\": 0.037724468575180255,\n \"acc_norm\": 0.5722543352601156,\n\ \ \"acc_norm_stderr\": 0.037724468575180255\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105653,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105653\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n\ \ \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.49361702127659574,\n \"acc_stderr\": 0.032683358999363366,\n\ \ \"acc_norm\": 0.49361702127659574,\n \"acc_norm_stderr\": 0.032683358999363366\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n\ \ \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n\ \ \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n\ \ \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.36243386243386244,\n \"acc_stderr\": 0.02475747390275206,\n \"\ acc_norm\": 0.36243386243386244,\n \"acc_norm_stderr\": 0.02475747390275206\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n\ \ \"acc_stderr\": 0.043758884927270605,\n \"acc_norm\": 0.3968253968253968,\n\ \ \"acc_norm_stderr\": 0.043758884927270605\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6806451612903226,\n\ \ \"acc_stderr\": 0.026522709674667765,\n \"acc_norm\": 0.6806451612903226,\n\ \ \"acc_norm_stderr\": 0.026522709674667765\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4729064039408867,\n \"acc_stderr\": 0.03512819077876106,\n\ \ \"acc_norm\": 0.4729064039408867,\n \"acc_norm_stderr\": 0.03512819077876106\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\"\ : 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.03501438706296781,\n\ \ \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.03501438706296781\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"\ acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.023814477086593552,\n\ \ \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 
0.023814477086593552\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6076923076923076,\n \"acc_stderr\": 0.02475600038213095,\n \ \ \"acc_norm\": 0.6076923076923076,\n \"acc_norm_stderr\": 0.02475600038213095\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948496,\n \ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948496\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \ \ \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3443708609271523,\n \"acc_stderr\": 0.03879687024073327,\n \"\ acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.03879687024073327\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8036697247706422,\n \"acc_stderr\": 0.017030719339154336,\n \"\ acc_norm\": 0.8036697247706422,\n \"acc_norm_stderr\": 0.017030719339154336\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.44907407407407407,\n \"acc_stderr\": 0.03392238405321616,\n \"\ acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.03392238405321616\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"\ acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7679324894514767,\n \"acc_stderr\": 0.02747974455080852,\n \ \ \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.02747974455080852\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6367713004484304,\n\ \ \"acc_stderr\": 0.032277904428505,\n \"acc_norm\": 0.6367713004484304,\n\ \ \"acc_norm_stderr\": 0.032277904428505\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6717557251908397,\n \"acc_stderr\": 0.041184385658062976,\n\ \ \"acc_norm\": 0.6717557251908397,\n \"acc_norm_stderr\": 0.041184385658062976\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.71900826446281,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\"\ : 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n\ \ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.7407407407407407,\n\ \ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7239263803680982,\n \"acc_stderr\": 0.035123852837050475,\n\ \ \"acc_norm\": 0.7239263803680982,\n \"acc_norm_stderr\": 0.035123852837050475\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.36607142857142855,\n\ \ \"acc_stderr\": 0.045723723587374296,\n \"acc_norm\": 0.36607142857142855,\n\ \ \"acc_norm_stderr\": 0.045723723587374296\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260595,\n\ \ \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260595\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8504273504273504,\n\ \ \"acc_stderr\": 0.02336505149175372,\n \"acc_norm\": 0.8504273504273504,\n\ \ \"acc_norm_stderr\": 0.02336505149175372\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.6,\n \"acc_stderr\": 
0.049236596391733084,\n \ \ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7879948914431673,\n\ \ \"acc_stderr\": 0.014616099385833685,\n \"acc_norm\": 0.7879948914431673,\n\ \ \"acc_norm_stderr\": 0.014616099385833685\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.025816756791584194,\n\ \ \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.025816756791584194\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4849162011173184,\n\ \ \"acc_stderr\": 0.01671489037999606,\n \"acc_norm\": 0.4849162011173184,\n\ \ \"acc_norm_stderr\": 0.01671489037999606\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n\ \ \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n\ \ \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n\ \ \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.024922001168886335,\n\ \ \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.024922001168886335\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \ \ \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4654498044328553,\n\ \ \"acc_stderr\": 0.012739711554045708,\n \"acc_norm\": 0.4654498044328553,\n\ \ \"acc_norm_stderr\": 0.012739711554045708\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5992647058823529,\n \"acc_stderr\": 0.029768263528933105,\n\ \ \"acc_norm\": 0.5992647058823529,\n \"acc_norm_stderr\": 0.029768263528933105\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6029411764705882,\n \"acc_stderr\": 0.019794488900024117,\n \ \ \"acc_norm\": 0.6029411764705882,\n \"acc_norm_stderr\": 0.019794488900024117\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n\ \ \"acc_stderr\": 0.04461272175910507,\n \"acc_norm\": 0.6818181818181818,\n\ \ \"acc_norm_stderr\": 0.04461272175910507\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6530612244897959,\n \"acc_stderr\": 0.030472526026726496,\n\ \ \"acc_norm\": 0.6530612244897959,\n \"acc_norm_stderr\": 0.030472526026726496\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7711442786069652,\n\ \ \"acc_stderr\": 0.02970528405677244,\n \"acc_norm\": 0.7711442786069652,\n\ \ \"acc_norm_stderr\": 0.02970528405677244\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \ \ \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n\ \ \"acc_stderr\": 0.03889951252827217,\n \"acc_norm\": 0.4819277108433735,\n\ \ \"acc_norm_stderr\": 0.03889951252827217\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n\ \ \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3843329253365973,\n\ \ \"mc1_stderr\": 
0.017028707301245203,\n \"mc2\": 0.5327328500103707,\n\ \ \"mc2_stderr\": 0.015551697577870274\n }\n}\n```" repo_url: https://huggingface.co/yeontaek/llama-2-13B-ensemble-v5 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|arc:challenge|25_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hellaswag|10_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T09:17:14.183323.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T09:17:14.183323.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T09:17:14.183323.parquet' - 
'**/details_harness|hendrycksTest-professional_law|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T09:17:14.183323.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T09:17:14.183323.parquet' - config_name: 
harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T09:17:14.183323.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T09_17_14.183323 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T09:17:14.183323.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T09:17:14.183323.parquet' - config_name: results data_files: - split: 2023_08_29T09_17_14.183323 path: - results_2023-08-29T09:17:14.183323.parquet - split: latest path: - results_2023-08-29T09:17:14.183323.parquet --- # Dataset Card for Evaluation run of yeontaek/llama-2-13B-ensemble-v5 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yeontaek/llama-2-13B-ensemble-v5 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [yeontaek/llama-2-13B-ensemble-v5](https://huggingface.co/yeontaek/llama-2-13B-ensemble-v5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
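For example, the aggregated metrics can be read straight from that "results" configuration (a minimal sketch, assuming the `results` config and `latest` split declared in the YAML header above):

```python
from datasets import load_dataset

# Load the aggregated run-level metrics; the "latest" split always points
# to the most recent evaluation timestamp for this model.
results = load_dataset(
    "open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v5",
    "results",
    split="latest",
)
print(results[0])
```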
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v5", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T09:17:14.183323](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v5/blob/main/results_2023-08-29T09%3A17%3A14.183323.json): ```python { "all": { "acc": 0.5953117661059801, "acc_stderr": 0.03391896483304526, "acc_norm": 0.5994365516843435, "acc_norm_stderr": 0.033896234769528244, "mc1": 0.3843329253365973, "mc1_stderr": 0.017028707301245203, "mc2": 0.5327328500103707, "mc2_stderr": 0.015551697577870274 }, "harness|arc:challenge|25": { "acc": 0.5844709897610921, "acc_stderr": 0.014401366641216388, "acc_norm": 0.6262798634812287, "acc_norm_stderr": 0.014137708601759084 }, "harness|hellaswag|10": { "acc": 0.6290579565823541, "acc_stderr": 0.004820697457420419, "acc_norm": 0.8306114319856602, "acc_norm_stderr": 0.0037432817493736324 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5185185185185185, "acc_stderr": 0.043163785995113245, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.631578947368421, "acc_stderr": 0.03925523381052932, "acc_norm": 0.631578947368421, "acc_norm_stderr": 0.03925523381052932 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6264150943396226, "acc_stderr": 0.029773082713319875, "acc_norm": 0.6264150943396226, "acc_norm_stderr": 0.029773082713319875 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03942082639927213, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03942082639927213 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5722543352601156, "acc_stderr": 0.037724468575180255, "acc_norm": 0.5722543352601156, "acc_norm_stderr": 0.037724468575180255 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105653, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105653 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.49361702127659574, "acc_stderr": 0.032683358999363366, "acc_norm": 0.49361702127659574, "acc_norm_stderr": 0.032683358999363366 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 
0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.36243386243386244, "acc_stderr": 0.02475747390275206, "acc_norm": 0.36243386243386244, "acc_norm_stderr": 0.02475747390275206 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3968253968253968, "acc_stderr": 0.043758884927270605, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.043758884927270605 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6806451612903226, "acc_stderr": 0.026522709674667765, "acc_norm": 0.6806451612903226, "acc_norm_stderr": 0.026522709674667765 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4729064039408867, "acc_stderr": 0.03512819077876106, "acc_norm": 0.4729064039408867, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7212121212121212, "acc_stderr": 0.03501438706296781, "acc_norm": 0.7212121212121212, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8756476683937824, "acc_stderr": 0.023814477086593552, "acc_norm": 0.8756476683937824, "acc_norm_stderr": 0.023814477086593552 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6076923076923076, "acc_stderr": 0.02475600038213095, "acc_norm": 0.6076923076923076, "acc_norm_stderr": 0.02475600038213095 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948496, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948496 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.634453781512605, "acc_stderr": 0.031282177063684614, "acc_norm": 0.634453781512605, "acc_norm_stderr": 0.031282177063684614 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.03879687024073327, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.03879687024073327 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8036697247706422, "acc_stderr": 0.017030719339154336, "acc_norm": 0.8036697247706422, "acc_norm_stderr": 0.017030719339154336 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.03392238405321616, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.03392238405321616 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.02747974455080852, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.02747974455080852 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6367713004484304, "acc_stderr": 0.032277904428505, "acc_norm": 0.6367713004484304, "acc_norm_stderr": 0.032277904428505 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6717557251908397, "acc_stderr": 
0.041184385658062976, "acc_norm": 0.6717557251908397, "acc_norm_stderr": 0.041184385658062976 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514512, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514512 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.042365112580946336, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7239263803680982, "acc_stderr": 0.035123852837050475, "acc_norm": 0.7239263803680982, "acc_norm_stderr": 0.035123852837050475 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.36607142857142855, "acc_stderr": 0.045723723587374296, "acc_norm": 0.36607142857142855, "acc_norm_stderr": 0.045723723587374296 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260595, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260595 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8504273504273504, "acc_stderr": 0.02336505149175372, "acc_norm": 0.8504273504273504, "acc_norm_stderr": 0.02336505149175372 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7879948914431673, "acc_stderr": 0.014616099385833685, "acc_norm": 0.7879948914431673, "acc_norm_stderr": 0.014616099385833685 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6416184971098265, "acc_stderr": 0.025816756791584194, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.025816756791584194 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4849162011173184, "acc_stderr": 0.01671489037999606, "acc_norm": 0.4849162011173184, "acc_norm_stderr": 0.01671489037999606 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6405228758169934, "acc_stderr": 0.027475969910660952, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.027475969910660952 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.02600330111788514, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.02600330111788514 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7222222222222222, "acc_stderr": 0.024922001168886335, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.024922001168886335 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4654498044328553, "acc_stderr": 0.012739711554045708, "acc_norm": 0.4654498044328553, "acc_norm_stderr": 0.012739711554045708 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5992647058823529, "acc_stderr": 0.029768263528933105, "acc_norm": 0.5992647058823529, "acc_norm_stderr": 0.029768263528933105 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6029411764705882, "acc_stderr": 0.019794488900024117, "acc_norm": 0.6029411764705882, "acc_norm_stderr": 0.019794488900024117 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910507, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910507 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6530612244897959, "acc_stderr": 0.030472526026726496, "acc_norm": 0.6530612244897959, "acc_norm_stderr": 0.030472526026726496 }, 
"harness|hendrycksTest-sociology|5": { "acc": 0.7711442786069652, "acc_stderr": 0.02970528405677244, "acc_norm": 0.7711442786069652, "acc_norm_stderr": 0.02970528405677244 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.0377525168068637, "acc_norm": 0.83, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.03889951252827217, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.03889951252827217 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.030611116557432528, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.030611116557432528 }, "harness|truthfulqa:mc|0": { "mc1": 0.3843329253365973, "mc1_stderr": 0.017028707301245203, "mc2": 0.5327328500103707, "mc2_stderr": 0.015551697577870274 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7305089831352234, -0.797832727432251, 0.2938637137413025, 0.1987496167421341, -0.22028201818466187, -0.05388482287526131, 0.045790933072566986, -0.2938095033168793, 0.5833916664123535, -0.10071755200624466, -0.5348464250564575, -0.7068055868148804, -0.4532986283302307, 0.18149320781230927, -0.039564065635204315, 0.8025445342063904, -0.14989358186721802, -0.1599731296300888, 0.07226767390966415, -0.020561104640364647, -0.25029462575912476, -0.3519439995288849, -0.45605918765068054, -0.34380313754081726, 0.14089390635490417, 0.41509026288986206, 0.41823911666870117, 0.7992053627967834, 0.6681169271469116, 0.31845560669898987, -0.34344786405563354, -0.041081659495830536, -0.1959344446659088, -0.2876244783401489, 0.41889163851737976, -0.3528764843940735, -0.8206993937492371, 0.29884475469589233, 0.7719013094902039, 0.6708514094352722, -0.06099751219153404, 0.3431603014469147, 0.02533663436770439, 0.5328568816184998, -0.3268330693244934, 0.04779435321688652, -0.29558682441711426, 0.2256123572587967, -0.22378045320510864, -0.2778269350528717, -0.2674873471260071, -0.2549317479133606, -0.10382002592086792, -0.912105917930603, 0.2536732256412506, 0.28128179907798767, 1.5683287382125854, -0.15465103089809418, -0.24015168845653534, 0.13936175405979156, -0.09766033291816711, 1.0811007022857666, -0.8830516934394836, 0.36399298906326294, 0.8035741448402405, 0.14909574389457703, -0.1594708412885666, -0.6319864392280579, -0.685991108417511, 0.13453438878059387, -0.36171668767929077, 0.3843216598033905, -0.04162173718214035, -0.21798986196517944, 0.34017762541770935, 0.6482545137405396, -0.6565194725990295, 0.19567355513572693, -0.6922531723976135, -0.2184506058692932, 1.0847809314727783, 0.3570391833782196, 0.0589446984231472, -0.35487470030784607, -0.6932640075683594, -0.6870090365409851, -0.3495400846004486, 0.2666721045970917, 0.48121559619903564, 0.3452959954738617, -0.4168176054954529, 0.677484393119812, -0.43052923679351807, 0.5537703037261963, 0.4481196403503418, -0.035021327435970306, 0.9458423256874084, -0.657220184803009, -0.5364851355552673, -0.0264494176954031, 1.107524037361145, 0.629732608795166, 0.030896009877324104, 0.2087765634059906, 0.04251933842897415, -0.045436978340148926, -0.010178358294069767, -0.8927410244941711, -0.28769782185554504, 0.19049809873104095, -0.40977340936660767, -0.5274748802185059, 0.3335048258304596, -0.9165946841239929, 0.1654372364282608, -0.02182653173804283, 0.45847880840301514, -0.4633449912071228, -0.13207755982875824, 0.28218936920166016, -0.4133453667163849, 0.8591489791870117, -0.19644322991371155, -0.7974991202354431, 0.3876221477985382, 0.5036789774894714, 0.7598209381103516, -0.07999696582555771, -0.40768247842788696, -0.045087020844221115, -0.09204816818237305, -0.2748263478279114, 0.5781010389328003, -0.2731524705886841, -0.5064775943756104, -0.29292768239974976, 0.279956191778183, -0.2426646500825882, -0.3205984830856323, 0.753912627696991, -0.25738564133644104, 0.22062985599040985, -0.4306733012199402, -0.6530423164367676, 0.13501891493797302, 0.39418530464172363, -0.40530309081077576, 1.3116310834884644, 0.17146055400371552, -0.8240759968757629, 0.4666427671909332, -0.56051105260849, -0.18587447702884674, -0.01364577654749155, -0.06402633339166641, -0.8491016626358032, -0.29722779989242554, 0.1991702914237976, 0.41503795981407166, -0.153336301445961, -0.16013118624687195, -0.45660048723220825, -0.3633788228034973, 0.33351996541023254, -0.19226303696632385, 1.2331926822662354, -0.0538284070789814, -0.7332285046577454, -0.12926970422267914, 
-1.3219826221466064, 0.35555943846702576, 0.2258329540491104, -0.3612661063671112, -0.13179129362106323, -0.4898317754268646, -0.025001002475619316, 0.15865576267242432, 0.2977680265903473, -0.7789517045021057, 0.2667236924171448, -0.3538942039012909, 0.18126580119132996, 1.318372368812561, 0.06376292556524277, 0.15573573112487793, -0.568905234336853, 0.4639447331428528, 0.226321280002594, 0.14758934080600739, 0.41295647621154785, -0.6276270747184753, -0.7720178961753845, -0.49190038442611694, -0.06678251177072525, 0.6001648902893066, -0.17224560678005219, 1.1778589487075806, 0.09353356808423996, -0.9401291608810425, -0.4297364354133606, -0.14292208850383759, 0.3946963846683502, 0.8690263628959656, 0.6153669357299805, -0.03848852589726448, -0.6395087242126465, -1.0930777788162231, -0.2316628396511078, -0.2278098464012146, 0.15327690541744232, 0.19622455537319183, 1.0077989101409912, -0.24800799787044525, 0.6050357818603516, -1.034455418586731, -0.1911628693342209, 0.1889488250017166, -0.06091705709695816, 0.7782777547836304, 0.7673479318618774, 0.6371443271636963, -0.63910973072052, -0.4797140061855316, 0.16446729004383087, -0.9308616518974304, -0.08299923688173294, 0.143259659409523, -0.3410348892211914, 0.0628998875617981, 0.12776757776737213, -0.680639386177063, 0.5517471432685852, 0.23137056827545166, -1.0564234256744385, 1.065663456916809, -0.3293151259422302, 0.5735427737236023, -1.0443857908248901, 0.1889685094356537, -0.07130137085914612, 0.07678650319576263, -0.49970152974128723, 0.009242254309356213, 0.05984419956803322, 0.446153849363327, -0.4859517812728882, 0.7798985242843628, -0.6528089046478271, -0.05293404683470726, 0.45980024337768555, 0.13805276155471802, -0.14237134158611298, 0.37154072523117065, -0.16485832631587982, 0.8010835647583008, 0.8180017471313477, -0.5197241902351379, 0.5445414185523987, 0.4247957766056061, -0.27581313252449036, 0.7995072603225708, -0.5059212446212769, -0.2533086836338043, 0.2597840130329132, -0.049676068127155304, -0.8964036107063293, -0.51863032579422, 0.035993292927742004, -0.5885950326919556, -0.10063572227954865, 0.3930707573890686, -0.24990664422512054, -0.8386702537536621, -0.9645693302154541, 0.3865985572338104, 0.7221235632896423, -0.46260493993759155, -0.1884450763463974, 0.05532237887382507, 0.12748460471630096, -0.8209165334701538, -0.882983386516571, -0.5087557435035706, -0.2625338137149811, -0.772089421749115, 0.3421695828437805, -0.3254958987236023, -0.26923680305480957, -0.10002316534519196, -0.2547450363636017, -0.328094482421875, 0.05855850130319595, 0.11971297860145569, 0.6859371662139893, -0.44955652952194214, -0.2473074346780777, -0.23723377287387848, -0.19833357632160187, 0.20154501497745514, -0.18554897606372833, 0.3896825611591339, -0.4620456397533417, -0.41343677043914795, -0.5146183371543884, -0.04100687429308891, 0.677757203578949, -0.07747910916805267, 0.7786709666252136, 0.4662986993789673, -0.3260556161403656, 0.025517726317048073, -0.2680436372756958, -0.3038257360458374, -0.597256600856781, 0.2902483642101288, -0.508767306804657, -1.0718649625778198, 0.8218865990638733, 0.5063826441764832, 0.0639464482665062, 1.211540937423706, 0.6094620227813721, -0.3392270803451538, 0.9974914789199829, 0.02258305810391903, 0.27573710680007935, 0.3533737361431122, -0.689789354801178, 0.09756749123334885, -0.8938709497451782, -0.34928640723228455, -0.6229972243309021, -0.5622656345367432, -0.7297078967094421, -0.049388427287340164, 0.2893771529197693, 0.17103853821754456, -0.7054965496063232, 0.5745404362678528, 
-0.8241122364997864, 0.572424590587616, 0.5409302115440369, 0.2612518072128296, 0.14960384368896484, -0.15168969333171844, -0.3996252715587616, -0.14402876794338226, -0.4334041178226471, -0.2629232406616211, 1.2597978115081787, 0.26451534032821655, 0.7250952124595642, 0.1462954729795456, 0.9313037395477295, 0.07838573306798935, -0.13379131257534027, -0.5651662349700928, 0.6478424668312073, 0.19050323963165283, -0.8203399777412415, -0.41965723037719727, -0.512467086315155, -1.0615397691726685, 0.38410264253616333, -0.1507989913225174, -0.939515233039856, 0.15805895626544952, 0.049441173672676086, -0.21530501544475555, 0.5125940442085266, -0.5562811493873596, 0.8349984884262085, -0.12477955967187881, -0.49204573035240173, 0.11060246080160141, -0.8419241309165955, 0.46186792850494385, 0.21166960895061493, 0.2351154386997223, 0.06465761363506317, 0.26042649149894714, 1.1999047994613647, -0.8306085467338562, 0.4874216616153717, 0.06404020637273788, 0.004195620771497488, 0.4145757555961609, -0.20023386180400848, 0.5385264158248901, 0.12824611365795135, -0.058395158499479294, -0.09798654168844223, 0.27417081594467163, -0.8795349597930908, -0.00949147716164589, 0.9209834337234497, -1.028879165649414, -0.6172057390213013, -0.8909497857093811, -0.4520918130874634, 0.033414825797080994, 0.5434051156044006, 0.41507211327552795, 0.4964251220226288, -0.013915039598941803, 0.46972405910491943, 0.8703357577323914, -0.10931720584630966, 0.6085773706436157, 0.19484683871269226, 0.13833120465278625, -0.6476547718048096, 0.8707273602485657, 0.07258918136358261, 0.3854769170284271, 0.2491253912448883, 0.39795124530792236, -0.5787671208381653, -0.2124820351600647, -0.23850750923156738, 0.47426876425743103, -0.6439493298530579, -0.28561580181121826, -0.37515711784362793, -0.3830219805240631, -0.7936868071556091, -0.6287083029747009, -0.27491191029548645, -0.543775200843811, -0.5510885715484619, -0.5585153102874756, 0.6238710284233093, 0.41527146100997925, -0.40389034152030945, 0.10599192976951599, -0.45545312762260437, 0.2720719277858734, 0.4085560739040375, 0.5823391675949097, -0.34871718287467957, -0.640549898147583, 0.1045127883553505, -0.1617397665977478, -0.5424870252609253, -0.9380192756652832, 0.24938517808914185, -0.028618022799491882, 0.5249635577201843, 0.6448549032211304, 0.027629176154732704, 0.8761134743690491, -0.17873987555503845, 1.0709576606750488, 0.35774677991867065, -0.7920946478843689, 0.709449052810669, -0.3012889623641968, 0.1681952178478241, 0.6649075150489807, 0.13303112983703613, -0.22633624076843262, -0.6185465455055237, -1.2945241928100586, -0.788896918296814, 0.7006553411483765, 0.3935067057609558, -0.23471564054489136, 0.040369488298892975, 0.11729000508785248, -0.30617526173591614, -0.17509429156780243, -0.6832015514373779, -0.8870379328727722, -0.2205556333065033, -0.5111161470413208, 0.09344949573278427, 0.014256003312766552, -0.3529241681098938, -0.7852446436882019, 0.9416152834892273, 0.006650708150118589, 0.60636967420578, 0.45294591784477234, 0.06908168643712997, 0.06345432996749878, 0.4764956533908844, 0.9954274892807007, 0.6727692484855652, -0.46301406621932983, 0.4141046404838562, 0.4254167079925537, -1.0799723863601685, 0.4768742024898529, 0.31986361742019653, -0.07611282914876938, -0.0886833444237709, 0.5052251815795898, 0.4185432195663452, -0.005418715067207813, -0.17144541442394257, 0.5825511813163757, 0.0009384214645251632, -0.6098924875259399, -0.37611937522888184, 0.09407006949186325, -0.07656686007976532, -0.001764776767231524, 0.3941747546195984, 
-0.14615808427333832, -0.052290212363004684, -0.503923773765564, 0.48070821166038513, 0.34765779972076416, -0.4849441945552826, -0.18534520268440247, 0.7551575303077698, -0.19816260039806366, -0.07776204496622086, 0.29209691286087036, -0.19705942273139954, -0.6201273202896118, 1.1497880220413208, 0.646328866481781, 0.6687525510787964, -0.2737770974636078, -0.09496179968118668, 0.9483473896980286, 0.38288891315460205, -0.06553575396537781, 0.5110970735549927, 0.31106406450271606, -0.2852809727191925, 0.21245677769184113, -0.878661572933197, 0.021440735086798668, 0.1762200891971588, -0.7799878120422363, 0.29141414165496826, -0.5869689583778381, -0.2036164402961731, 0.0232794638723135, 0.4216602146625519, -0.4709422290325165, 0.5655927062034607, -0.3861243724822998, 1.2019563913345337, -0.9632126092910767, 0.6785298585891724, 0.732562780380249, -0.5205880999565125, -1.0560919046401978, -0.5271499156951904, 0.04889736324548721, -0.860809862613678, 0.563789963722229, -0.07259278744459152, 0.15496201813220978, -0.06673241406679153, -0.7532086968421936, -0.9243207573890686, 1.4362739324569702, -0.08503133803606033, -0.46009695529937744, 0.2976701855659485, -0.04020647332072258, 0.46535709500312805, 0.17162452638149261, 0.5908507704734802, 0.7528373003005981, 0.857631266117096, -0.02084934525191784, -0.7194404602050781, 0.2826491594314575, -0.4953951835632324, -0.36367759108543396, 0.4565149247646332, -0.9839559197425842, 1.191796064376831, -0.004605427850037813, 0.20228923857212067, -0.1402633786201477, 0.7234075665473938, 0.7698850631713867, 0.2875729203224182, 0.3771282434463501, 0.9299559593200684, 0.9218804836273193, -0.49124279618263245, 0.9767475128173828, -0.1978020966053009, 0.8431730270385742, 0.6971825361251831, 0.18029674887657166, 0.7636387348175049, 0.6677953004837036, -0.5570415258407593, 0.556526780128479, 0.8202642798423767, -0.30848798155784607, 0.41920679807662964, 0.2792719900608063, -0.18098492920398712, -0.16973815858364105, 0.44728830456733704, -0.9193341135978699, 0.15175358951091766, 0.0923890620470047, -0.29478296637535095, 0.09830215573310852, -0.5094172358512878, 0.2643999755382538, -0.06269090622663498, -0.06769156455993652, 0.3715595006942749, 0.043490827083587646, -0.3576897084712982, 0.8909112215042114, -0.09636610746383667, 0.7703843116760254, -0.5345184803009033, -0.048663388937711716, -0.3594170808792114, 0.5659324526786804, -0.44485238194465637, -1.059454321861267, 0.12549230456352234, 0.06800531595945358, -0.11441580206155777, -0.09849649667739868, 0.7312338352203369, -0.1841135025024414, -0.8343231081962585, 0.09413371980190277, 0.01798069290816784, 0.09360995143651962, 0.5877478122711182, -0.64985591173172, -0.3445981740951538, -0.0547427237033844, -0.5293985605239868, 0.0909682884812355, 0.3191675543785095, 0.26142367720603943, 0.5352065563201904, 0.660077691078186, 0.14419466257095337, 0.4212679862976074, -0.5369585752487183, 0.7727835178375244, -1.0716313123703003, -0.7617068290710449, -0.9134071469306946, 0.4424777328968048, -0.3400810956954956, -0.8887265920639038, 0.9518628120422363, 1.0145540237426758, 0.9136499762535095, 0.014851118437945843, 0.6690625548362732, -0.3626922369003296, 0.2543417811393738, -0.39712798595428467, 0.9521833062171936, -0.8408539295196533, -0.21661090850830078, -0.2741647958755493, -0.6874446868896484, -0.39645081758499146, 0.8858717679977417, -0.1931217759847641, 0.11164695769548416, 1.0229495763778687, 0.6532471179962158, -0.13263028860092163, 0.05485909804701805, -0.0500478595495224, 0.5846025943756104, 0.39089342951774597, 
0.9923815727233887, 0.6402457356452942, -0.8132987022399902, 0.350907564163208, -0.47941288352012634, -0.3882277309894562, -0.4064614772796631, -0.4915076792240143, -0.8902062773704529, -0.4601784646511078, -0.21062524616718292, -0.5875280499458313, -0.18251007795333862, 1.0063865184783936, 0.45764854550361633, -0.9282447695732117, -0.42125263810157776, -0.06689859181642532, 0.12273741513490677, -0.5745800733566284, -0.40643590688705444, 0.786493718624115, -0.0783386081457138, -0.5251550674438477, 0.18582305312156677, -0.1422932744026184, 0.28736215829849243, 0.11397892236709595, -0.42411965131759644, -0.7159644365310669, 0.010042853653430939, 0.4216841459274292, 0.34245678782463074, -0.635290801525116, -0.7672712206840515, 0.2959103286266327, -0.5038309097290039, 0.42313486337661743, -0.08841478824615479, -0.5411418676376343, 0.05318359658122063, 0.6910145282745361, 0.5155215263366699, 0.697544276714325, -0.05353127047419548, 0.02085554413497448, -0.6758692264556885, 0.2663800120353699, -0.022332649677991867, 0.25307923555374146, -0.05328567326068878, -0.3009476661682129, 0.7191606760025024, 0.7304273843765259, -0.46864598989486694, -1.059469223022461, -0.4393095076084137, -1.4829716682434082, -0.0005124337621964514, 1.0383179187774658, 0.03822486102581024, -0.5021455883979797, 0.26230835914611816, -0.10864933580160141, 0.17653492093086243, -0.3504208028316498, 0.7842510938644409, 0.7964602112770081, -0.3061896562576294, 0.14643727242946625, -0.6005930304527283, 0.35883671045303345, 0.5196937918663025, -1.2085485458374023, -0.14321944117546082, 0.21253709495067596, 0.3093984127044678, 0.3698265552520752, 0.6025450825691223, -0.09814631193876266, 0.2851671874523163, 0.25419092178344727, 0.022991640493273735, 0.057331234216690063, 0.08748997002840042, -0.209025576710701, 0.09095712751150131, -0.22650253772735596, -0.46428969502449036 ]
open-llm-leaderboard/details_RobbeD__Orca-Platypus-3B
open-llm-leaderboard
2023-08-29T10:09:17Z
201
0
[ "region:us" ]
null
2023-08-29T10:08:19Z
--- pretty_name: Evaluation run of RobbeD/Orca-Platypus-3B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [RobbeD/Orca-Platypus-3B](https://huggingface.co/RobbeD/Orca-Platypus-3B) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_RobbeD__Orca-Platypus-3B\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T10:07:29.426848](https://huggingface.co/datasets/open-llm-leaderboard/details_RobbeD__Orca-Platypus-3B/blob/main/results_2023-08-29T10%3A07%3A29.426848.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.27366722319077513,\n \"\ acc_stderr\": 0.03210093803398038,\n \"acc_norm\": 0.2768555704328155,\n\ \ \"acc_norm_stderr\": 0.0320995646677269,\n \"mc1\": 0.27539779681762544,\n\ \ \"mc1_stderr\": 0.01563813566777552,\n \"mc2\": 0.41928517905056045,\n\ \ \"mc2_stderr\": 0.0152672030417133\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.3993174061433447,\n \"acc_stderr\": 0.014312094557946707,\n\ \ \"acc_norm\": 0.4308873720136519,\n \"acc_norm_stderr\": 0.014471133392642476\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4967138020314678,\n\ \ \"acc_stderr\": 0.004989673640014264,\n \"acc_norm\": 0.6532563234415455,\n\ \ \"acc_norm_stderr\": 0.004749606196363324\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.34074074074074073,\n\ \ \"acc_stderr\": 0.040943762699967926,\n \"acc_norm\": 0.34074074074074073,\n\ \ \"acc_norm_stderr\": 0.040943762699967926\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.28289473684210525,\n \"acc_stderr\": 0.03665349695640767,\n\ \ \"acc_norm\": 0.28289473684210525,\n \"acc_norm_stderr\": 0.03665349695640767\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.27,\n\ \ \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \ \ \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.2792452830188679,\n \"acc_stderr\": 0.027611163402399715,\n\ \ \"acc_norm\": 0.2792452830188679,\n \"acc_norm_stderr\": 0.027611163402399715\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3055555555555556,\n\ \ \"acc_stderr\": 0.03852084696008534,\n \"acc_norm\": 0.3055555555555556,\n\ \ \"acc_norm_stderr\": 0.03852084696008534\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.15,\n \"acc_stderr\": 0.035887028128263714,\n \ \ \"acc_norm\": 0.15,\n \"acc_norm_stderr\": 0.035887028128263714\n \ \ },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\"\ : 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23699421965317918,\n\ \ \"acc_stderr\": 0.03242414757483098,\n \"acc_norm\": 0.23699421965317918,\n\ \ \"acc_norm_stderr\": 0.03242414757483098\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179963,\n\ \ \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179963\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n\ \ \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.25957446808510637,\n \"acc_stderr\": 0.028659179374292323,\n\ \ \"acc_norm\": 0.25957446808510637,\n \"acc_norm_stderr\": 0.028659179374292323\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21929824561403508,\n\ \ \"acc_stderr\": 0.03892431106518754,\n \"acc_norm\": 0.21929824561403508,\n\ \ \"acc_norm_stderr\": 0.03892431106518754\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2689655172413793,\n \"acc_stderr\": 0.036951833116502325,\n\ \ \"acc_norm\": 0.2689655172413793,\n \"acc_norm_stderr\": 0.036951833116502325\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"\ acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.19047619047619047,\n\ \ \"acc_stderr\": 0.03512207412302052,\n \"acc_norm\": 0.19047619047619047,\n\ \ \"acc_norm_stderr\": 0.03512207412302052\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.22258064516129034,\n\ \ \"acc_stderr\": 0.023664216671642535,\n \"acc_norm\": 0.22258064516129034,\n\ \ \"acc_norm_stderr\": 0.023664216671642535\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.2315270935960591,\n \"acc_stderr\": 0.029678333141444437,\n\ \ \"acc_norm\": 0.2315270935960591,\n \"acc_norm_stderr\": 0.029678333141444437\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\"\ : 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.30303030303030304,\n \"acc_stderr\": 0.035886248000917075,\n\ \ \"acc_norm\": 0.30303030303030304,\n \"acc_norm_stderr\": 0.035886248000917075\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.2727272727272727,\n \"acc_stderr\": 0.03173071239071724,\n \"\ acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.03173071239071724\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.25906735751295334,\n \"acc_stderr\": 0.031618779179354094,\n\ \ \"acc_norm\": 0.25906735751295334,\n \"acc_norm_stderr\": 0.031618779179354094\n\ \ },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.23846153846153847,\n \"acc_stderr\": 0.021606294494647727,\n\ \ \"acc_norm\": 0.23846153846153847,\n \"acc_norm_stderr\": 0.021606294494647727\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712163,\n \ \ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712163\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.22268907563025211,\n \"acc_stderr\": 0.027025433498882364,\n\ \ \"acc_norm\": 0.22268907563025211,\n \"acc_norm_stderr\": 0.027025433498882364\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"\ acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.24403669724770644,\n \"acc_stderr\": 0.01841528635141641,\n \"\ acc_norm\": 0.24403669724770644,\n \"acc_norm_stderr\": 0.01841528635141641\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.20833333333333334,\n \"acc_stderr\": 0.027696910713093936,\n \"\ acc_norm\": 0.20833333333333334,\n \"acc_norm_stderr\": 0.027696910713093936\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.24019607843137256,\n \"acc_stderr\": 0.02998373305591361,\n \"\ acc_norm\": 0.24019607843137256,\n \"acc_norm_stderr\": 0.02998373305591361\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.26582278481012656,\n \"acc_stderr\": 0.028756799629658342,\n \ \ \"acc_norm\": 0.26582278481012656,\n \"acc_norm_stderr\": 0.028756799629658342\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.2600896860986547,\n\ \ \"acc_stderr\": 0.029442495585857476,\n \"acc_norm\": 0.2600896860986547,\n\ \ \"acc_norm_stderr\": 0.029442495585857476\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.20610687022900764,\n \"acc_stderr\": 0.035477710041594626,\n\ \ \"acc_norm\": 0.20610687022900764,\n \"acc_norm_stderr\": 0.035477710041594626\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.4049586776859504,\n \"acc_stderr\": 0.044811377559424694,\n \"\ acc_norm\": 0.4049586776859504,\n \"acc_norm_stderr\": 0.044811377559424694\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3055555555555556,\n\ \ \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.3055555555555556,\n\ \ \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.3128834355828221,\n \"acc_stderr\": 0.036429145782924055,\n\ \ \"acc_norm\": 0.3128834355828221,\n \"acc_norm_stderr\": 0.036429145782924055\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.19642857142857142,\n\ \ \"acc_stderr\": 0.03770970049347018,\n \"acc_norm\": 0.19642857142857142,\n\ \ \"acc_norm_stderr\": 0.03770970049347018\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.2524271844660194,\n \"acc_stderr\": 0.04301250399690877,\n\ \ \"acc_norm\": 0.2524271844660194,\n \"acc_norm_stderr\": 0.04301250399690877\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.3076923076923077,\n\ \ \"acc_stderr\": 0.03023638994217309,\n \"acc_norm\": 0.3076923076923077,\n\ \ \"acc_norm_stderr\": 0.03023638994217309\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 
0.044084400227680794,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.34738186462324394,\n\ \ \"acc_stderr\": 0.01702667174865574,\n \"acc_norm\": 0.34738186462324394,\n\ \ \"acc_norm_stderr\": 0.01702667174865574\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.30346820809248554,\n \"acc_stderr\": 0.02475241196091721,\n\ \ \"acc_norm\": 0.30346820809248554,\n \"acc_norm_stderr\": 0.02475241196091721\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2446927374301676,\n\ \ \"acc_stderr\": 0.014378169884098447,\n \"acc_norm\": 0.2446927374301676,\n\ \ \"acc_norm_stderr\": 0.014378169884098447\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.28104575163398693,\n \"acc_stderr\": 0.025738854797818723,\n\ \ \"acc_norm\": 0.28104575163398693,\n \"acc_norm_stderr\": 0.025738854797818723\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3633440514469453,\n\ \ \"acc_stderr\": 0.027316847674192707,\n \"acc_norm\": 0.3633440514469453,\n\ \ \"acc_norm_stderr\": 0.027316847674192707\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.2716049382716049,\n \"acc_stderr\": 0.024748624490537365,\n\ \ \"acc_norm\": 0.2716049382716049,\n \"acc_norm_stderr\": 0.024748624490537365\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2553191489361702,\n \"acc_stderr\": 0.026011992930902006,\n \ \ \"acc_norm\": 0.2553191489361702,\n \"acc_norm_stderr\": 0.026011992930902006\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\ \ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\ \ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.16176470588235295,\n \"acc_stderr\": 0.02236867256288675,\n\ \ \"acc_norm\": 0.16176470588235295,\n \"acc_norm_stderr\": 0.02236867256288675\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.29411764705882354,\n \"acc_stderr\": 0.018433427649401892,\n \ \ \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.018433427649401892\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.19090909090909092,\n\ \ \"acc_stderr\": 0.03764425585984926,\n \"acc_norm\": 0.19090909090909092,\n\ \ \"acc_norm_stderr\": 0.03764425585984926\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.1836734693877551,\n \"acc_stderr\": 0.02478907133200767,\n\ \ \"acc_norm\": 0.1836734693877551,\n \"acc_norm_stderr\": 0.02478907133200767\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.31840796019900497,\n\ \ \"acc_stderr\": 0.032941184790540944,\n \"acc_norm\": 0.31840796019900497,\n\ \ \"acc_norm_stderr\": 0.032941184790540944\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110175,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110175\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.30120481927710846,\n\ \ \"acc_stderr\": 0.035716092300534796,\n \"acc_norm\": 0.30120481927710846,\n\ \ \"acc_norm_stderr\": 0.035716092300534796\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.3391812865497076,\n \"acc_stderr\": 0.03631053496488905,\n\ \ \"acc_norm\": 0.3391812865497076,\n \"acc_norm_stderr\": 0.03631053496488905\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.27539779681762544,\n\ \ 
\"mc1_stderr\": 0.01563813566777552,\n \"mc2\": 0.41928517905056045,\n\ \ \"mc2_stderr\": 0.0152672030417133\n }\n}\n```" repo_url: https://huggingface.co/RobbeD/Orca-Platypus-3B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|arc:challenge|25_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hellaswag|10_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T10:07:29.426848.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T10:07:29.426848.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T10:07:29.426848.parquet' - 
'**/details_harness|hendrycksTest-professional_law|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T10:07:29.426848.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T10:07:29.426848.parquet' - config_name: 
harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T10:07:29.426848.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T10_07_29.426848 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T10:07:29.426848.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T10:07:29.426848.parquet' - config_name: results data_files: - split: 2023_08_29T10_07_29.426848 path: - results_2023-08-29T10:07:29.426848.parquet - split: latest path: - results_2023-08-29T10:07:29.426848.parquet --- # Dataset Card for Evaluation run of RobbeD/Orca-Platypus-3B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/RobbeD/Orca-Platypus-3B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [RobbeD/Orca-Platypus-3B](https://huggingface.co/RobbeD/Orca-Platypus-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
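That "results" configuration can be loaded directly to get the aggregated metrics (a minimal sketch, assuming the config and split names exactly as declared in the YAML section above; note the YAML exposes a `latest` split alongside the timestamped one, and the exact schema of the results file is not shown in this card):

```python
from datasets import load_dataset

# Minimal sketch: pull the aggregated metrics from the "results" configuration
# declared in the YAML above. The "latest" split points at the most recent
# results_*.parquet file for this model.
results = load_dataset(
    "open-llm-leaderboard/details_RobbeD__Orca-Platypus-3B",
    "results",
    split="latest",
)
print(results)      # dataset summary (features and number of rows)
print(results[0])   # first record of the aggregated results file
```

The per-task details follow the same pattern, using the per-task configuration names listed above, as in the example below.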
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_RobbeD__Orca-Platypus-3B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T10:07:29.426848](https://huggingface.co/datasets/open-llm-leaderboard/details_RobbeD__Orca-Platypus-3B/blob/main/results_2023-08-29T10%3A07%3A29.426848.json): ```python { "all": { "acc": 0.27366722319077513, "acc_stderr": 0.03210093803398038, "acc_norm": 0.2768555704328155, "acc_norm_stderr": 0.0320995646677269, "mc1": 0.27539779681762544, "mc1_stderr": 0.01563813566777552, "mc2": 0.41928517905056045, "mc2_stderr": 0.0152672030417133 }, "harness|arc:challenge|25": { "acc": 0.3993174061433447, "acc_stderr": 0.014312094557946707, "acc_norm": 0.4308873720136519, "acc_norm_stderr": 0.014471133392642476 }, "harness|hellaswag|10": { "acc": 0.4967138020314678, "acc_stderr": 0.004989673640014264, "acc_norm": 0.6532563234415455, "acc_norm_stderr": 0.004749606196363324 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.040943762699967926, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.040943762699967926 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.28289473684210525, "acc_stderr": 0.03665349695640767, "acc_norm": 0.28289473684210525, "acc_norm_stderr": 0.03665349695640767 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2792452830188679, "acc_stderr": 0.027611163402399715, "acc_norm": 0.2792452830188679, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03852084696008534, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03852084696008534 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.15, "acc_stderr": 0.035887028128263714, "acc_norm": 0.15, "acc_norm_stderr": 0.035887028128263714 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.23699421965317918, "acc_stderr": 0.03242414757483098, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.03242414757483098 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179963, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179963 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.25957446808510637, "acc_stderr": 0.028659179374292323, "acc_norm": 0.25957446808510637, "acc_norm_stderr": 0.028659179374292323 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.03892431106518754, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518754 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2689655172413793, 
"acc_stderr": 0.036951833116502325, "acc_norm": 0.2689655172413793, "acc_norm_stderr": 0.036951833116502325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.19047619047619047, "acc_stderr": 0.03512207412302052, "acc_norm": 0.19047619047619047, "acc_norm_stderr": 0.03512207412302052 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.22258064516129034, "acc_stderr": 0.023664216671642535, "acc_norm": 0.22258064516129034, "acc_norm_stderr": 0.023664216671642535 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2315270935960591, "acc_stderr": 0.029678333141444437, "acc_norm": 0.2315270935960591, "acc_norm_stderr": 0.029678333141444437 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.30303030303030304, "acc_stderr": 0.035886248000917075, "acc_norm": 0.30303030303030304, "acc_norm_stderr": 0.035886248000917075 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2727272727272727, "acc_stderr": 0.03173071239071724, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.25906735751295334, "acc_stderr": 0.031618779179354094, "acc_norm": 0.25906735751295334, "acc_norm_stderr": 0.031618779179354094 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.23846153846153847, "acc_stderr": 0.021606294494647727, "acc_norm": 0.23846153846153847, "acc_norm_stderr": 0.021606294494647727 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712163, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712163 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.027025433498882364, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.027025433498882364 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24403669724770644, "acc_stderr": 0.01841528635141641, "acc_norm": 0.24403669724770644, "acc_norm_stderr": 0.01841528635141641 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.20833333333333334, "acc_stderr": 0.027696910713093936, "acc_norm": 0.20833333333333334, "acc_norm_stderr": 0.027696910713093936 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591361, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.028756799629658342, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.028756799629658342 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.2600896860986547, "acc_stderr": 0.029442495585857476, "acc_norm": 0.2600896860986547, "acc_norm_stderr": 0.029442495585857476 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.20610687022900764, 
"acc_stderr": 0.035477710041594626, "acc_norm": 0.20610687022900764, "acc_norm_stderr": 0.035477710041594626 }, "harness|hendrycksTest-international_law|5": { "acc": 0.4049586776859504, "acc_stderr": 0.044811377559424694, "acc_norm": 0.4049586776859504, "acc_norm_stderr": 0.044811377559424694 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.3055555555555556, "acc_stderr": 0.044531975073749834, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3128834355828221, "acc_stderr": 0.036429145782924055, "acc_norm": 0.3128834355828221, "acc_norm_stderr": 0.036429145782924055 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.19642857142857142, "acc_stderr": 0.03770970049347018, "acc_norm": 0.19642857142857142, "acc_norm_stderr": 0.03770970049347018 }, "harness|hendrycksTest-management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690877, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690877 }, "harness|hendrycksTest-marketing|5": { "acc": 0.3076923076923077, "acc_stderr": 0.03023638994217309, "acc_norm": 0.3076923076923077, "acc_norm_stderr": 0.03023638994217309 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.34738186462324394, "acc_stderr": 0.01702667174865574, "acc_norm": 0.34738186462324394, "acc_norm_stderr": 0.01702667174865574 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.30346820809248554, "acc_stderr": 0.02475241196091721, "acc_norm": 0.30346820809248554, "acc_norm_stderr": 0.02475241196091721 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2446927374301676, "acc_stderr": 0.014378169884098447, "acc_norm": 0.2446927374301676, "acc_norm_stderr": 0.014378169884098447 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.28104575163398693, "acc_stderr": 0.025738854797818723, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.025738854797818723 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3633440514469453, "acc_stderr": 0.027316847674192707, "acc_norm": 0.3633440514469453, "acc_norm_stderr": 0.027316847674192707 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2716049382716049, "acc_stderr": 0.024748624490537365, "acc_norm": 0.2716049382716049, "acc_norm_stderr": 0.024748624490537365 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.026011992930902006, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.026011992930902006 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.16176470588235295, "acc_stderr": 0.02236867256288675, "acc_norm": 0.16176470588235295, "acc_norm_stderr": 0.02236867256288675 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.29411764705882354, "acc_stderr": 0.018433427649401892, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.018433427649401892 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.19090909090909092, "acc_stderr": 0.03764425585984926, "acc_norm": 0.19090909090909092, "acc_norm_stderr": 0.03764425585984926 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.1836734693877551, "acc_stderr": 0.02478907133200767, "acc_norm": 0.1836734693877551, "acc_norm_stderr": 0.02478907133200767 
}, "harness|hendrycksTest-sociology|5": { "acc": 0.31840796019900497, "acc_stderr": 0.032941184790540944, "acc_norm": 0.31840796019900497, "acc_norm_stderr": 0.032941184790540944 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.35, "acc_stderr": 0.047937248544110175, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110175 }, "harness|hendrycksTest-virology|5": { "acc": 0.30120481927710846, "acc_stderr": 0.035716092300534796, "acc_norm": 0.30120481927710846, "acc_norm_stderr": 0.035716092300534796 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3391812865497076, "acc_stderr": 0.03631053496488905, "acc_norm": 0.3391812865497076, "acc_norm_stderr": 0.03631053496488905 }, "harness|truthfulqa:mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.01563813566777552, "mc2": 0.41928517905056045, "mc2_stderr": 0.0152672030417133 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7027219533920288, -0.8520739674568176, 0.27894413471221924, 0.19437313079833984, -0.1844756156206131, -0.07719149440526962, 0.07663765549659729, -0.2646273076534271, 0.6190919876098633, -0.0878722071647644, -0.43969592452049255, -0.7088956832885742, -0.47689828276634216, 0.22603829205036163, -0.01434045284986496, 0.8041335344314575, -0.16112510859966278, -0.11254055052995682, 0.12464278191328049, -0.03835064917802811, -0.28388604521751404, -0.3855222463607788, -0.5769091248512268, -0.332669198513031, 0.1721591055393219, 0.39968881011009216, 0.5010742545127869, 0.8833172917366028, 0.6786441206932068, 0.28520849347114563, -0.3439510762691498, -0.039317645132541656, -0.1828266680240631, -0.32345861196517944, 0.36943453550338745, -0.3699498772621155, -0.8761381506919861, 0.3400580585002899, 0.7276771068572998, 0.6997315287590027, -0.060122501105070114, 0.28865861892700195, 0.03428378328680992, 0.5459367632865906, -0.3359436094760895, 0.08128786832094193, -0.2784240245819092, 0.23051270842552185, -0.26366591453552246, -0.3533915579319, -0.2674013674259186, -0.3049553334712982, -0.11175990849733353, -0.9236019849777222, 0.2602086663246155, 0.28642696142196655, 1.5867940187454224, -0.20361833274364471, -0.21707214415073395, 0.05794626101851463, -0.04849611222743988, 1.0122854709625244, -0.8028324842453003, 0.30603787302970886, 0.7923349142074585, 0.09935464709997177, -0.18710169196128845, -0.5792267322540283, -0.6613978743553162, 0.15971505641937256, -0.35125496983528137, 0.3617316782474518, 0.005258091725409031, -0.24127337336540222, 0.3707888424396515, 0.6782706379890442, -0.6271660327911377, 0.15462584793567657, -0.6800113320350647, -0.16738192737102509, 1.069229006767273, 0.36289918422698975, 0.06862792372703552, -0.37155333161354065, -0.6955114603042603, -0.7090889811515808, -0.42946910858154297, 0.3196290135383606, 0.5028007626533508, 0.4179646074771881, -0.39987683296203613, 0.6748020648956299, -0.35155653953552246, 0.5946288704872131, 0.3704603314399719, -0.008610865101218224, 0.9360889196395874, -0.6496849656105042, -0.5346609950065613, -0.06669936329126358, 1.0996806621551514, 0.6234585642814636, 0.0811251774430275, 0.2637999355792999, 0.049769166857004166, -0.08533430844545364, -0.05552494525909424, -0.8312312960624695, -0.3200116753578186, 0.18380683660507202, -0.4281534254550934, -0.5003742575645447, 0.36425545811653137, -0.9089523553848267, 0.10316665470600128, 0.021351702511310577, 0.4467867612838745, -0.5206834673881531, -0.11653486639261246, 0.3510061502456665, -0.4504931569099426, 0.8716040253639221, -0.1987290233373642, -0.8453027606010437, 0.39803260564804077, 0.48372897505760193, 0.7648680806159973, -0.062089405953884125, -0.44695204496383667, -0.07054232805967331, -0.07605744153261185, -0.25313600897789, 0.5671796798706055, -0.2712218165397644, -0.4856509864330292, -0.29136964678764343, 0.2643628418445587, -0.26853427290916443, -0.32116034626960754, 0.7682802677154541, -0.2259848415851593, 0.16646169126033783, -0.47473371028900146, -0.6115996241569519, 0.13471369445323944, 0.36703217029571533, -0.4213181436061859, 1.2654486894607544, 0.19842931628227234, -0.8626227974891663, 0.5193237066268921, -0.6151918172836304, -0.18045811355113983, -0.011164980009198189, -0.06137561425566673, -0.8197196125984192, -0.32283252477645874, 0.25756707787513733, 0.4165423512458801, -0.12023749202489853, -0.17648935317993164, -0.4642857015132904, -0.33155256509780884, 0.33533257246017456, -0.13080888986587524, 1.298382043838501, -0.057551994919776917, -0.6966069936752319, 
-0.18470436334609985, -1.2515374422073364, 0.27106207609176636, 0.26806819438934326, -0.3827466666698456, -0.16191859543323517, -0.5031900405883789, -0.05560726299881935, 0.11820733547210693, 0.2805759608745575, -0.8565701246261597, 0.2809959352016449, -0.3602392375469208, 0.20311981439590454, 1.2786800861358643, 0.04969954118132591, 0.18337543308734894, -0.558020830154419, 0.491083562374115, 0.1960781365633011, 0.2078465223312378, 0.39770323038101196, -0.599475085735321, -0.7868961691856384, -0.5264438986778259, -0.016230549663305283, 0.5991541743278503, -0.15894171595573425, 1.1412487030029297, 0.09403473883867264, -0.8694676160812378, -0.43417033553123474, -0.20017658174037933, 0.3883393108844757, 0.819938600063324, 0.6030773520469666, -0.05615459755063057, -0.6549348831176758, -1.0509912967681885, -0.3109041154384613, -0.1552170217037201, 0.15445351600646973, 0.23478715121746063, 1.0177114009857178, -0.17326128482818604, 0.6476432085037231, -1.0342121124267578, -0.2488773912191391, 0.17946937680244446, -0.06831693649291992, 0.810030460357666, 0.8105907440185547, 0.6249887943267822, -0.6659229397773743, -0.49700820446014404, 0.14840459823608398, -0.9435173869132996, -0.05860316380858421, 0.1195555031299591, -0.3205209970474243, 0.05805380269885063, 0.1668388396501541, -0.7078154683113098, 0.6099915504455566, 0.18718035519123077, -1.0934865474700928, 1.0217013359069824, -0.3588091731071472, 0.5174321532249451, -0.964993417263031, 0.22650837898254395, -0.11957352608442307, 0.015187501907348633, -0.4541492462158203, 0.018819523975253105, 0.07343126088380814, 0.4203377068042755, -0.472029447555542, 0.8061216473579407, -0.7118464708328247, -0.08135447651147842, 0.48183000087738037, 0.19550177454948425, -0.13714827597141266, 0.33496254682540894, -0.13347476720809937, 0.7218374013900757, 0.8427053093910217, -0.4899024963378906, 0.5406262874603271, 0.45595768094062805, -0.22914740443229675, 0.8034944534301758, -0.47949641942977905, -0.31511569023132324, 0.25701841711997986, -0.006322069093585014, -0.8355773091316223, -0.49620282649993896, 0.04730319604277611, -0.6335757374763489, -0.12218226492404938, 0.39842140674591064, -0.27957597374916077, -0.8501840233802795, -0.9731752276420593, 0.37922531366348267, 0.6181297898292542, -0.4795253872871399, -0.15908466279506683, 0.07029247283935547, 0.06221119314432144, -0.8121418356895447, -0.8681655526161194, -0.48782241344451904, -0.2500760555267334, -0.7069509029388428, 0.29081934690475464, -0.2423018366098404, -0.20854446291923523, -0.11472274363040924, -0.2209175080060959, -0.25820252299308777, 0.012571030296385288, 0.1263197362422943, 0.6709777116775513, -0.3804060220718384, -0.3059180676937103, -0.2729966640472412, -0.21225319802761078, 0.2599773406982422, -0.14530646800994873, 0.3901084065437317, -0.4365563988685608, -0.39824923872947693, -0.43746519088745117, -0.02524247206747532, 0.6918915510177612, -0.08451443165540695, 0.7401124238967896, 0.3919617831707001, -0.3044361174106598, 0.017595499753952026, -0.2501450181007385, -0.2714284360408783, -0.5767386555671692, 0.25148940086364746, -0.550050675868988, -1.0800296068191528, 0.8346272110939026, 0.5603228807449341, 0.058474618941545486, 1.1471678018569946, 0.5818164348602295, -0.30020880699157715, 1.050925612449646, 0.05407165363430977, 0.33887770771980286, 0.37390583753585815, -0.6866728067398071, 0.12459079921245575, -0.895821213722229, -0.39643365144729614, -0.5897727012634277, -0.5294073224067688, -0.6835749745368958, -0.0008762058569118381, 0.2885356545448303, 0.18075142800807953, 
-0.7083035707473755, 0.5887595415115356, -0.8361506462097168, 0.614348292350769, 0.5683496594429016, 0.30254924297332764, 0.1543329656124115, -0.16602320969104767, -0.4344756305217743, -0.13232864439487457, -0.47805055975914, -0.25047391653060913, 1.278926968574524, 0.3193398118019104, 0.7628512978553772, 0.11624907702207565, 0.8721305727958679, 0.06345172971487045, -0.07269641757011414, -0.546617329120636, 0.6233252286911011, 0.1864379644393921, -0.8276222944259644, -0.4144856929779053, -0.5124971270561218, -1.1219022274017334, 0.40178075432777405, -0.1611219197511673, -0.9050427079200745, 0.19751660525798798, 0.019468974322080612, -0.3016848862171173, 0.508179247379303, -0.5242953896522522, 0.843743085861206, -0.11358189582824707, -0.4328974783420563, 0.16243316233158112, -0.8715130090713501, 0.4543439447879791, 0.1974363476037979, 0.2419411987066269, 0.06372226774692535, 0.24842573702335358, 1.1875972747802734, -0.8829011917114258, 0.4425204396247864, 0.05558963865041733, 0.0391821563243866, 0.3529963195323944, -0.12913036346435547, 0.5297618508338928, 0.18041540682315826, -0.02443760074675083, -0.14656856656074524, 0.3152533173561096, -0.9192889928817749, -0.009995722211897373, 0.8593223094940186, -1.0023219585418701, -0.557895302772522, -0.9433931708335876, -0.4924631118774414, 0.06940677016973495, 0.5785337686538696, 0.4069887399673462, 0.541691243648529, 0.028642041608691216, 0.4471038281917572, 0.8580058217048645, -0.15981927514076233, 0.6358766555786133, 0.22260376811027527, 0.09336856752634048, -0.7224723100662231, 0.842520534992218, 0.05003634840250015, 0.33324944972991943, 0.2596915066242218, 0.3994572162628174, -0.5999670624732971, -0.22406980395317078, -0.17476649582386017, 0.5360679030418396, -0.6397489309310913, -0.3281249701976776, -0.42405247688293457, -0.37792709469795227, -0.7876562476158142, -0.6817194819450378, -0.28157123923301697, -0.5094936490058899, -0.5561861395835876, -0.5392858982086182, 0.6189234256744385, 0.4399875998497009, -0.47608527541160583, 0.07899125665426254, -0.43106216192245483, 0.23108991980552673, 0.3798050880432129, 0.5562809109687805, -0.3788384795188904, -0.6233355402946472, 0.1392897069454193, -0.16840042173862457, -0.5459219217300415, -0.9274320006370544, 0.31322506070137024, -0.061262160539627075, 0.46066850423812866, 0.6165210008621216, 0.03907494992017746, 0.8852205276489258, -0.13729600608348846, 1.0302661657333374, 0.35617297887802124, -0.8118025660514832, 0.7239342331886292, -0.3533632755279541, 0.17125701904296875, 0.6662151217460632, 0.1374097317457199, -0.18869128823280334, -0.6774218082427979, -1.3244972229003906, -0.8029769062995911, 0.7177338600158691, 0.3792712092399597, -0.2562388777732849, 0.0006532942061312497, 0.14789670705795288, -0.29226821660995483, -0.18740712106227875, -0.7505381107330322, -0.8888020515441895, -0.18944968283176422, -0.4512476325035095, 0.14751684665679932, 0.03609376773238182, -0.36046072840690613, -0.8069998621940613, 0.9065949320793152, 0.03119691275060177, 0.5626512765884399, 0.4822855293750763, 0.08483051508665085, 0.09712079912424088, 0.40128204226493835, 0.9358850121498108, 0.7206172943115234, -0.5008822679519653, 0.40891575813293457, 0.38511723279953003, -1.0762639045715332, 0.4369901120662689, 0.3404671251773834, -0.06295040994882584, -0.0829315111041069, 0.4528844356536865, 0.34257712960243225, -0.027143865823745728, -0.17192485928535461, 0.6680725812911987, -0.045278262346982956, -0.6000745296478271, -0.3942076563835144, 0.013358652591705322, -0.12930089235305786, -0.01677236147224903, 
0.39665064215660095, -0.12190423905849457, -0.07895711809396744, -0.5085102319717407, 0.4529244899749756, 0.3566693365573883, -0.4491749405860901, -0.15478503704071045, 0.7673402428627014, -0.18441742658615112, -0.09520719200372696, 0.2904934287071228, -0.19227927923202515, -0.6059327125549316, 1.1067942380905151, 0.6107404828071594, 0.6239215731620789, -0.26770034432411194, -0.10209319740533829, 0.9597067832946777, 0.3890281617641449, -0.047634564340114594, 0.6063527464866638, 0.3292684257030487, -0.2908734977245331, 0.22826910018920898, -0.8966477513313293, 0.027555523440241814, 0.188180074095726, -0.812913179397583, 0.32272768020629883, -0.6192575693130493, -0.23130519688129425, 0.038976218551397324, 0.3970412313938141, -0.4388466477394104, 0.5624139308929443, -0.3912404775619507, 1.2507548332214355, -0.9959746599197388, 0.6627625823020935, 0.7284961342811584, -0.5707695484161377, -1.0698018074035645, -0.6223940253257751, -0.0029598630499094725, -0.8425904512405396, 0.5918756723403931, -0.0957253947854042, 0.1942983716726303, -0.07910746335983276, -0.7666118144989014, -0.9492866396903992, 1.3888496160507202, -0.05242644622921944, -0.33867543935775757, 0.3163977265357971, -0.0361139290034771, 0.44017207622528076, 0.1335374414920807, 0.6257345080375671, 0.7880759835243225, 0.8163318037986755, -0.04101251810789108, -0.7005467414855957, 0.3129732310771942, -0.47240400314331055, -0.3279668390750885, 0.47319021821022034, -0.9539861083030701, 1.242133378982544, 0.024465356022119522, 0.18275532126426697, -0.13241665065288544, 0.6900467276573181, 0.7786165475845337, 0.2145138829946518, 0.3636331558227539, 0.9299502968788147, 0.8895410299301147, -0.5190814137458801, 0.9691881537437439, -0.1581830382347107, 0.8696735501289368, 0.6742568016052246, 0.21956245601177216, 0.7627894878387451, 0.6556172370910645, -0.5419434905052185, 0.5189083814620972, 0.8667784929275513, -0.3009246587753296, 0.39691779017448425, 0.28729888796806335, -0.1296146959066391, -0.13252660632133484, 0.43514060974121094, -0.90267014503479, 0.17185774445533752, 0.11770735681056976, -0.3568975627422333, 0.04828313738107681, -0.4715707004070282, 0.3456529974937439, -0.06190526857972145, -0.08877395838499069, 0.3465140759944916, 0.03678591549396515, -0.3635273575782776, 0.9228240251541138, -0.1180817261338234, 0.7583125829696655, -0.4993301331996918, -0.06226726993918419, -0.4098084568977356, 0.6133031249046326, -0.45674577355384827, -1.0633723735809326, 0.14666016399860382, 0.06105373799800873, -0.10691457241773605, -0.18407362699508667, 0.6607968807220459, -0.1942111700773239, -0.7548980712890625, 0.09578204900026321, 0.01441618986427784, 0.10659995675086975, 0.558181643486023, -0.6993802189826965, -0.33042585849761963, -0.06446602940559387, -0.4925379455089569, 0.08611255884170532, 0.333711713552475, 0.30847984552383423, 0.5427915453910828, 0.6636871099472046, 0.20803864300251007, 0.40167221426963806, -0.5650044083595276, 0.7749194502830505, -1.0718555450439453, -0.7716786861419678, -0.9492107033729553, 0.3864331841468811, -0.33285194635391235, -0.8901457190513611, 0.99642014503479, 1.0693693161010742, 0.9171085357666016, 0.011990168131887913, 0.6516900062561035, -0.4047154188156128, 0.3183746933937073, -0.3725271224975586, 0.9670355319976807, -0.8159176707267761, -0.2657121419906616, -0.2834565043449402, -0.7244570255279541, -0.4717990756034851, 0.902184247970581, -0.20569005608558655, 0.07641248404979706, 1.097438931465149, 0.6572588682174683, -0.11912375688552856, 0.08234384655952454, -0.09599102288484573, 0.5468327403068542, 
0.3853107690811157, 1.0318037271499634, 0.65281742811203, -0.8038417100906372, 0.38337400555610657, -0.43848973512649536, -0.4466499984264374, -0.3923209607601166, -0.5168681144714355, -0.8780664801597595, -0.40993067622184753, -0.1829332560300827, -0.6317658424377441, -0.1537797749042511, 0.9790832996368408, 0.46322476863861084, -0.9085742235183716, -0.45764756202697754, -0.11709858477115631, 0.1363154798746109, -0.5897974967956543, -0.4086105525493622, 0.8057553172111511, -0.07033880800008774, -0.533968448638916, 0.15287883579730988, -0.09320859611034393, 0.28964963555336, 0.14973504841327667, -0.35809609293937683, -0.7291820645332336, 0.0013895491138100624, 0.4035434126853943, 0.43461668491363525, -0.6493120193481445, -0.7319515943527222, 0.23884722590446472, -0.5398330688476562, 0.43236827850341797, -0.03649357333779335, -0.5423066020011902, 0.09613504260778427, 0.6720831394195557, 0.47389793395996094, 0.6914610266685486, 0.007767460308969021, 0.06091661751270294, -0.6745602488517761, 0.24221785366535187, -0.006687916815280914, 0.27344727516174316, -0.008333113975822926, -0.2925388216972351, 0.7623128890991211, 0.7363644242286682, -0.4939258396625519, -1.063262939453125, -0.45753273367881775, -1.4497746229171753, 0.0063974508084356785, 1.0701930522918701, 0.021364489570260048, -0.5350722670555115, 0.21513542532920837, -0.10843756794929504, 0.13371354341506958, -0.34904900193214417, 0.7460814118385315, 0.7626356482505798, -0.34438714385032654, 0.18846678733825684, -0.5962200164794922, 0.39247700572013855, 0.4991644024848938, -1.1771085262298584, -0.09921043366193771, 0.1812513917684555, 0.31317901611328125, 0.36113041639328003, 0.6440126895904541, -0.17262940108776093, 0.2694205939769745, 0.2862536907196045, 0.04590507969260216, 0.026413749903440475, 0.10430121421813965, -0.22837451100349426, 0.0895158126950264, -0.25621944665908813, -0.48373284935951233 ]
open-llm-leaderboard/details_yeontaek__Platypus2-13B-LoRa-v2
open-llm-leaderboard
2023-08-29T11:22:55Z
201
0
[ "region:us" ]
null
2023-08-29T11:21:57Z
--- pretty_name: Evaluation run of yeontaek/Platypus2-13B-LoRa-v2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yeontaek/Platypus2-13B-LoRa-v2](https://huggingface.co/yeontaek/Platypus2-13B-LoRa-v2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yeontaek__Platypus2-13B-LoRa-v2\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T11:20:59.240376](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__Platypus2-13B-LoRa-v2/blob/main/results_2023-08-29T11%3A20%3A59.240376.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.571991245483798,\n \"\ acc_stderr\": 0.034294067141786025,\n \"acc_norm\": 0.5761375119651778,\n\ \ \"acc_norm_stderr\": 0.03427336583128381,\n \"mc1\": 0.28151774785801714,\n\ \ \"mc1_stderr\": 0.01574402724825605,\n \"mc2\": 0.4191985438925104,\n\ \ \"mc2_stderr\": 0.014270484892545822\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5563139931740614,\n \"acc_stderr\": 0.014518421825670444,\n\ \ \"acc_norm\": 0.5947098976109215,\n \"acc_norm_stderr\": 0.014346869060229328\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6179047998406691,\n\ \ \"acc_stderr\": 0.004849065962692132,\n \"acc_norm\": 0.8241386178052181,\n\ \ \"acc_norm_stderr\": 0.003799241408502969\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n\ \ \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n\ \ \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.039889037033362836,\n\ \ \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.039889037033362836\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.53,\n\ \ \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6150943396226415,\n \"acc_stderr\": 0.02994649856769995,\n\ \ \"acc_norm\": 0.6150943396226415,\n \"acc_norm_stderr\": 0.02994649856769995\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6111111111111112,\n\ \ \"acc_stderr\": 0.04076663253918567,\n \"acc_norm\": 0.6111111111111112,\n\ \ \"acc_norm_stderr\": 0.04076663253918567\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \ \ \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n \ \ 
},\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n\ \ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5722543352601156,\n\ \ \"acc_stderr\": 0.03772446857518026,\n \"acc_norm\": 0.5722543352601156,\n\ \ \"acc_norm_stderr\": 0.03772446857518026\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.047240073523838876,\n\ \ \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.047240073523838876\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n\ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4297872340425532,\n \"acc_stderr\": 0.03236214467715564,\n\ \ \"acc_norm\": 0.4297872340425532,\n \"acc_norm_stderr\": 0.03236214467715564\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n\ \ \"acc_stderr\": 0.043727482902780064,\n \"acc_norm\": 0.3157894736842105,\n\ \ \"acc_norm_stderr\": 0.043727482902780064\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.04144311810878151,\n\ \ \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.04144311810878151\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3386243386243386,\n \"acc_stderr\": 0.02437319786798306,\n \"\ acc_norm\": 0.3386243386243386,\n \"acc_norm_stderr\": 0.02437319786798306\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n\ \ \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n\ \ \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6709677419354839,\n\ \ \"acc_stderr\": 0.02672949906834996,\n \"acc_norm\": 0.6709677419354839,\n\ \ \"acc_norm_stderr\": 0.02672949906834996\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n\ \ \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\"\ : 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.03546563019624336,\n\ \ \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.03546563019624336\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7424242424242424,\n \"acc_stderr\": 0.031156269519646836,\n \"\ acc_norm\": 0.7424242424242424,\n \"acc_norm_stderr\": 0.031156269519646836\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8186528497409327,\n \"acc_stderr\": 0.02780703236068609,\n\ \ \"acc_norm\": 0.8186528497409327,\n \"acc_norm_stderr\": 0.02780703236068609\n\ \ },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5307692307692308,\n \"acc_stderr\": 0.025302958890850154,\n\ \ \"acc_norm\": 0.5307692307692308,\n \"acc_norm_stderr\": 0.025302958890850154\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2777777777777778,\n \"acc_stderr\": 0.027309140588230172,\n \ \ \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.027309140588230172\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6218487394957983,\n \"acc_stderr\": 0.031499305777849054,\n\ \ \"acc_norm\": 0.6218487394957983,\n \"acc_norm_stderr\": 0.031499305777849054\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"\ acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7688073394495413,\n \"acc_stderr\": 0.01807575024163315,\n \"\ acc_norm\": 0.7688073394495413,\n \"acc_norm_stderr\": 0.01807575024163315\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n \"\ acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7549019607843137,\n \"acc_stderr\": 0.03019028245350195,\n \"\ acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.03019028245350195\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7637130801687764,\n \"acc_stderr\": 0.027652153144159263,\n \ \ \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.027652153144159263\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6591928251121076,\n\ \ \"acc_stderr\": 0.03181149747055359,\n \"acc_norm\": 0.6591928251121076,\n\ \ \"acc_norm_stderr\": 0.03181149747055359\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6335877862595419,\n \"acc_stderr\": 0.04225875451969637,\n\ \ \"acc_norm\": 0.6335877862595419,\n \"acc_norm_stderr\": 0.04225875451969637\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7107438016528925,\n \"acc_stderr\": 0.04139112727635463,\n \"\ acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.04139112727635463\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n\ \ \"acc_stderr\": 0.04236511258094633,\n \"acc_norm\": 0.7407407407407407,\n\ \ \"acc_norm_stderr\": 0.04236511258094633\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6687116564417178,\n \"acc_stderr\": 0.03697983910025588,\n\ \ \"acc_norm\": 0.6687116564417178,\n \"acc_norm_stderr\": 0.03697983910025588\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n\ \ \"acc_stderr\": 0.04432804055291517,\n \"acc_norm\": 0.32142857142857145,\n\ \ \"acc_norm_stderr\": 0.04432804055291517\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n\ \ \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.811965811965812,\n\ \ \"acc_stderr\": 0.025598193686652244,\n \"acc_norm\": 0.811965811965812,\n\ \ \"acc_norm_stderr\": 0.025598193686652244\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \ \ 
\"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7611749680715197,\n\ \ \"acc_stderr\": 0.015246803197398682,\n \"acc_norm\": 0.7611749680715197,\n\ \ \"acc_norm_stderr\": 0.015246803197398682\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.025305258131879716,\n\ \ \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.025305258131879716\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.41675977653631285,\n\ \ \"acc_stderr\": 0.016489134962438954,\n \"acc_norm\": 0.41675977653631285,\n\ \ \"acc_norm_stderr\": 0.016489134962438954\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6241830065359477,\n \"acc_stderr\": 0.027732834353363947,\n\ \ \"acc_norm\": 0.6241830065359477,\n \"acc_norm_stderr\": 0.027732834353363947\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.662379421221865,\n\ \ \"acc_stderr\": 0.02685882587948854,\n \"acc_norm\": 0.662379421221865,\n\ \ \"acc_norm_stderr\": 0.02685882587948854\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.654320987654321,\n \"acc_stderr\": 0.026462487777001872,\n\ \ \"acc_norm\": 0.654320987654321,\n \"acc_norm_stderr\": 0.026462487777001872\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.46099290780141844,\n \"acc_stderr\": 0.02973659252642444,\n \ \ \"acc_norm\": 0.46099290780141844,\n \"acc_norm_stderr\": 0.02973659252642444\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4602346805736636,\n\ \ \"acc_stderr\": 0.01272978538659857,\n \"acc_norm\": 0.4602346805736636,\n\ \ \"acc_norm_stderr\": 0.01272978538659857\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5698529411764706,\n \"acc_stderr\": 0.030074971917302875,\n\ \ \"acc_norm\": 0.5698529411764706,\n \"acc_norm_stderr\": 0.030074971917302875\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5996732026143791,\n \"acc_stderr\": 0.01982184368827176,\n \ \ \"acc_norm\": 0.5996732026143791,\n \"acc_norm_stderr\": 0.01982184368827176\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n\ \ \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n\ \ \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5959183673469388,\n \"acc_stderr\": 0.031414708025865885,\n\ \ \"acc_norm\": 0.5959183673469388,\n \"acc_norm_stderr\": 0.031414708025865885\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.746268656716418,\n\ \ \"acc_stderr\": 0.03076944496729602,\n \"acc_norm\": 0.746268656716418,\n\ \ \"acc_norm_stderr\": 0.03076944496729602\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \ \ \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4397590361445783,\n\ \ \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.4397590361445783,\n\ \ \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.031885780176863984,\n\ \ \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.031885780176863984\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28151774785801714,\n\ \ \"mc1_stderr\": 0.01574402724825605,\n \"mc2\": 
0.4191985438925104,\n\ \ \"mc2_stderr\": 0.014270484892545822\n }\n}\n```" repo_url: https://huggingface.co/yeontaek/Platypus2-13B-LoRa-v2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|arc:challenge|25_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hellaswag|10_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T11:20:59.240376.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T11:20:59.240376.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T11:20:59.240376.parquet' - 
'**/details_harness|hendrycksTest-professional_law|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T11:20:59.240376.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T11:20:59.240376.parquet' - config_name: 
harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T11:20:59.240376.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T11_20_59.240376 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T11:20:59.240376.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T11:20:59.240376.parquet' - config_name: results data_files: - split: 2023_08_29T11_20_59.240376 path: - results_2023-08-29T11:20:59.240376.parquet - split: latest path: - results_2023-08-29T11:20:59.240376.parquet --- # Dataset Card for Evaluation run of yeontaek/Platypus2-13B-LoRa-v2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yeontaek/Platypus2-13B-LoRa-v2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [yeontaek/Platypus2-13B-LoRa-v2](https://huggingface.co/yeontaek/Platypus2-13B-LoRa-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
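As a minimal sketch, the aggregated "results" configuration can be loaded directly, assuming it behaves like the per-task configurations declared above (the exact column layout of the parquet file is not guaranteed here):

```python
from datasets import load_dataset

# Sketch: pull the aggregated "results" configuration of this details dataset.
# The config name "results" and the "latest" split are taken from the configs listed above.
results = load_dataset(
    "open-llm-leaderboard/details_yeontaek__Platypus2-13B-LoRa-v2",
    "results",
    split="latest",
)
print(results)  # inspect the available columns before relying on any field name
```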
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yeontaek__Platypus2-13B-LoRa-v2", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T11:20:59.240376](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__Platypus2-13B-LoRa-v2/blob/main/results_2023-08-29T11%3A20%3A59.240376.json): ```python { "all": { "acc": 0.571991245483798, "acc_stderr": 0.034294067141786025, "acc_norm": 0.5761375119651778, "acc_norm_stderr": 0.03427336583128381, "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.4191985438925104, "mc2_stderr": 0.014270484892545822 }, "harness|arc:challenge|25": { "acc": 0.5563139931740614, "acc_stderr": 0.014518421825670444, "acc_norm": 0.5947098976109215, "acc_norm_stderr": 0.014346869060229328 }, "harness|hellaswag|10": { "acc": 0.6179047998406691, "acc_stderr": 0.004849065962692132, "acc_norm": 0.8241386178052181, "acc_norm_stderr": 0.003799241408502969 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.04319223625811331, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.039889037033362836, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6150943396226415, "acc_stderr": 0.02994649856769995, "acc_norm": 0.6150943396226415, "acc_norm_stderr": 0.02994649856769995 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6111111111111112, "acc_stderr": 0.04076663253918567, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.04076663253918567 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5722543352601156, "acc_stderr": 0.03772446857518026, "acc_norm": 0.5722543352601156, "acc_norm_stderr": 0.03772446857518026 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.047240073523838876, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.047240073523838876 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4297872340425532, "acc_stderr": 0.03236214467715564, "acc_norm": 0.4297872340425532, "acc_norm_stderr": 0.03236214467715564 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.043727482902780064, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.043727482902780064 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 
0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3386243386243386, "acc_stderr": 0.02437319786798306, "acc_norm": 0.3386243386243386, "acc_norm_stderr": 0.02437319786798306 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6709677419354839, "acc_stderr": 0.02672949906834996, "acc_norm": 0.6709677419354839, "acc_norm_stderr": 0.02672949906834996 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7090909090909091, "acc_stderr": 0.03546563019624336, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.03546563019624336 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7424242424242424, "acc_stderr": 0.031156269519646836, "acc_norm": 0.7424242424242424, "acc_norm_stderr": 0.031156269519646836 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8186528497409327, "acc_stderr": 0.02780703236068609, "acc_norm": 0.8186528497409327, "acc_norm_stderr": 0.02780703236068609 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5307692307692308, "acc_stderr": 0.025302958890850154, "acc_norm": 0.5307692307692308, "acc_norm_stderr": 0.025302958890850154 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.027309140588230172, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.027309140588230172 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6218487394957983, "acc_stderr": 0.031499305777849054, "acc_norm": 0.6218487394957983, "acc_norm_stderr": 0.031499305777849054 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7688073394495413, "acc_stderr": 0.01807575024163315, "acc_norm": 0.7688073394495413, "acc_norm_stderr": 0.01807575024163315 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.034093869469927006, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7549019607843137, "acc_stderr": 0.03019028245350195, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.03019028245350195 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7637130801687764, "acc_stderr": 0.027652153144159263, "acc_norm": 0.7637130801687764, "acc_norm_stderr": 0.027652153144159263 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6591928251121076, "acc_stderr": 0.03181149747055359, "acc_norm": 0.6591928251121076, "acc_norm_stderr": 0.03181149747055359 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6335877862595419, 
"acc_stderr": 0.04225875451969637, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.04225875451969637 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.04139112727635463, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.04139112727635463 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094633, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094633 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6687116564417178, "acc_stderr": 0.03697983910025588, "acc_norm": 0.6687116564417178, "acc_norm_stderr": 0.03697983910025588 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291517, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291517 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.811965811965812, "acc_stderr": 0.025598193686652244, "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.025598193686652244 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7611749680715197, "acc_stderr": 0.015246803197398682, "acc_norm": 0.7611749680715197, "acc_norm_stderr": 0.015246803197398682 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6705202312138728, "acc_stderr": 0.025305258131879716, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.025305258131879716 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.41675977653631285, "acc_stderr": 0.016489134962438954, "acc_norm": 0.41675977653631285, "acc_norm_stderr": 0.016489134962438954 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6241830065359477, "acc_stderr": 0.027732834353363947, "acc_norm": 0.6241830065359477, "acc_norm_stderr": 0.027732834353363947 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.662379421221865, "acc_stderr": 0.02685882587948854, "acc_norm": 0.662379421221865, "acc_norm_stderr": 0.02685882587948854 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.654320987654321, "acc_stderr": 0.026462487777001872, "acc_norm": 0.654320987654321, "acc_norm_stderr": 0.026462487777001872 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.46099290780141844, "acc_stderr": 0.02973659252642444, "acc_norm": 0.46099290780141844, "acc_norm_stderr": 0.02973659252642444 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4602346805736636, "acc_stderr": 0.01272978538659857, "acc_norm": 0.4602346805736636, "acc_norm_stderr": 0.01272978538659857 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5698529411764706, "acc_stderr": 0.030074971917302875, "acc_norm": 0.5698529411764706, "acc_norm_stderr": 0.030074971917302875 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5996732026143791, "acc_stderr": 0.01982184368827176, "acc_norm": 0.5996732026143791, "acc_norm_stderr": 0.01982184368827176 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5959183673469388, "acc_stderr": 0.031414708025865885, "acc_norm": 0.5959183673469388, "acc_norm_stderr": 0.031414708025865885 }, 
"harness|hendrycksTest-sociology|5": { "acc": 0.746268656716418, "acc_stderr": 0.03076944496729602, "acc_norm": 0.746268656716418, "acc_norm_stderr": 0.03076944496729602 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699121, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.031885780176863984, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.031885780176863984 }, "harness|truthfulqa:mc|0": { "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.4191985438925104, "mc2_stderr": 0.014270484892545822 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7199947237968445, -0.8388698101043701, 0.2810790538787842, 0.19512590765953064, -0.2053365260362625, -0.09166800230741501, 0.04976676404476166, -0.26033562421798706, 0.6138848066329956, -0.08293872326612473, -0.5037350654602051, -0.6968496441841125, -0.4590703547000885, 0.18769387900829315, -0.007758746854960918, 0.7922900319099426, -0.17321686446666718, -0.16529573500156403, 0.08971604704856873, -0.07700957357883453, -0.2776115834712982, -0.3693055808544159, -0.4693096876144409, -0.3733551502227783, 0.1714012175798416, 0.40650781989097595, 0.47015875577926636, 0.811514675617218, 0.6704974174499512, 0.3143794536590576, -0.35284414887428284, -0.029272526502609253, -0.20586837828159332, -0.31345003843307495, 0.3814617991447449, -0.3484661877155304, -0.8631682991981506, 0.30123040080070496, 0.7458025217056274, 0.7212146520614624, -0.035866592079401016, 0.3288175165653229, 0.04087839275598526, 0.5788732171058655, -0.3119741976261139, 0.06900070607662201, -0.27843111753463745, 0.2217792421579361, -0.20874325931072235, -0.30012941360473633, -0.27282896637916565, -0.2985222637653351, -0.08141101151704788, -0.9329141974449158, 0.2253287434577942, 0.25864654779434204, 1.5712165832519531, -0.18930189311504364, -0.24996444582939148, 0.09569794684648514, -0.07563193887472153, 1.0477890968322754, -0.8658262491226196, 0.34121134877204895, 0.8061147332191467, 0.1314166784286499, -0.1632954329252243, -0.6134138703346252, -0.6368584632873535, 0.14275532960891724, -0.37203240394592285, 0.3694302439689636, -0.0019156488124281168, -0.2573208808898926, 0.35234424471855164, 0.6410850286483765, -0.6266593337059021, 0.16766871511936188, -0.6769404411315918, -0.1633247584104538, 1.0310163497924805, 0.362775057554245, 0.056643031537532806, -0.36397475004196167, -0.6785491108894348, -0.6678560972213745, -0.36924290657043457, 0.30484646558761597, 0.4835120439529419, 0.39920005202293396, -0.41916099190711975, 0.7074645757675171, -0.40365996956825256, 0.5662547945976257, 0.41279542446136475, -0.016577735543251038, 0.9524781107902527, -0.6532227993011475, -0.5120311975479126, -0.06523513793945312, 1.121755838394165, 0.6412268280982971, 0.02842913195490837, 0.2166091352701187, 0.06100864335894585, -0.048828281462192535, -0.05890916660428047, -0.8791639804840088, -0.28851082921028137, 0.19180899858474731, -0.408188134431839, -0.4574032425880432, 0.36090219020843506, -0.9061850905418396, 0.12446554750204086, -0.016093146055936813, 0.4683225452899933, -0.5081222057342529, -0.1359969824552536, 0.34056299924850464, -0.40660804510116577, 0.8394785523414612, -0.16955269873142242, -0.7893250584602356, 0.3997654318809509, 0.5040448904037476, 0.7834592461585999, -0.06931967288255692, -0.42769911885261536, -0.10797018557786942, -0.10948086529970169, -0.2527076005935669, 0.5724183320999146, -0.221721351146698, -0.46463194489479065, -0.26918867230415344, 0.294900506734848, -0.24853011965751648, -0.3393439054489136, 0.7320173382759094, -0.23809093236923218, 0.20983180403709412, -0.46811211109161377, -0.6271353960037231, 0.1458531618118286, 0.3824220299720764, -0.4545076787471771, 1.2857472896575928, 0.24602088332176208, -0.870236873626709, 0.4400598406791687, -0.59556645154953, -0.18644951283931732, -0.005972136743366718, -0.05954288691282272, -0.8455914855003357, -0.281616747379303, 0.19661489129066467, 0.39201727509498596, -0.14361830055713654, -0.17374588549137115, -0.41007548570632935, -0.33248016238212585, 0.3476431667804718, -0.11391102522611618, 1.2586917877197266, -0.07449212670326233, -0.7215406894683838, 
-0.13148953020572662, -1.267654538154602, 0.35859695076942444, 0.28220367431640625, -0.3800387680530548, -0.17098553478717804, -0.4922938048839569, -0.03945333883166313, 0.17204934358596802, 0.3033193051815033, -0.784599781036377, 0.2668013572692871, -0.36976057291030884, 0.2022850662469864, 1.276050090789795, 0.04230829328298569, 0.16693158447742462, -0.5650768876075745, 0.44693827629089355, 0.2048884928226471, 0.16928434371948242, 0.41526126861572266, -0.63739413022995, -0.7805893421173096, -0.46230730414390564, -0.06036904454231262, 0.5999568700790405, -0.17697349190711975, 1.1790624856948853, 0.07843418419361115, -0.8815160989761353, -0.43225061893463135, -0.13386867940425873, 0.40234485268592834, 0.8496551513671875, 0.610605001449585, -0.0346355065703392, -0.6336936354637146, -1.0549087524414062, -0.2577248811721802, -0.1878352016210556, 0.1858496516942978, 0.1879791021347046, 1.030655026435852, -0.2172149419784546, 0.6426088809967041, -1.0280063152313232, -0.2063969522714615, 0.15172696113586426, -0.05691719800233841, 0.8077945113182068, 0.7587152719497681, 0.6213700175285339, -0.641243040561676, -0.5306209325790405, 0.16517262160778046, -0.9475767016410828, -0.06144079193472862, 0.15638220310211182, -0.3276402950286865, 0.06142338365316391, 0.13782162964344025, -0.6983833312988281, 0.5733692049980164, 0.222730815410614, -1.1175957918167114, 1.017608404159546, -0.3621233105659485, 0.53509920835495, -1.0206018686294556, 0.21630342304706573, -0.0797821432352066, 0.035607386380434036, -0.500031590461731, 0.012956131249666214, 0.0766163095831871, 0.44906145334243774, -0.5077049732208252, 0.798211395740509, -0.685943067073822, -0.06561379879713058, 0.4555084705352783, 0.1847390979528427, -0.10956954956054688, 0.34856313467025757, -0.14503541588783264, 0.7589927315711975, 0.8022042512893677, -0.4803939461708069, 0.5343374013900757, 0.468259334564209, -0.274212509393692, 0.7995917797088623, -0.505486249923706, -0.2579227089881897, 0.263803094625473, -0.06701833754777908, -0.8533568382263184, -0.4834415018558502, 0.039998289197683334, -0.5940659642219543, -0.141816645860672, 0.3627495765686035, -0.27818596363067627, -0.8379863500595093, -0.9860824942588806, 0.3544045388698578, 0.7093605995178223, -0.4649645686149597, -0.17530465126037598, 0.04381387680768967, 0.11369099467992783, -0.7995995283126831, -0.8549070358276367, -0.534673810005188, -0.2516396939754486, -0.7522580027580261, 0.2764030992984772, -0.3128027319908142, -0.25961488485336304, -0.08623096346855164, -0.2435038983821869, -0.28765222430229187, 0.023689940571784973, 0.15991716086864471, 0.6835986971855164, -0.44764065742492676, -0.30693817138671875, -0.29452982544898987, -0.21102112531661987, 0.22158172726631165, -0.13343733549118042, 0.42084965109825134, -0.45456477999687195, -0.3957366347312927, -0.5147742629051208, -0.022157112136483192, 0.6748658418655396, -0.07866338640451431, 0.7928339242935181, 0.43523192405700684, -0.3116907477378845, 0.03054504282772541, -0.22750675678253174, -0.29387736320495605, -0.5891247987747192, 0.29101628065109253, -0.5249125957489014, -1.063766598701477, 0.8293433785438538, 0.5262318253517151, 0.05842255800962448, 1.1786798238754272, 0.5861428380012512, -0.31132787466049194, 1.025829792022705, 0.02792290225625038, 0.31592246890068054, 0.35701483488082886, -0.7351726293563843, 0.10890144109725952, -0.916864275932312, -0.3616551160812378, -0.6016361117362976, -0.5304059386253357, -0.6997632384300232, -0.06456602364778519, 0.26393216848373413, 0.20149916410446167, -0.6993570327758789, 
0.5769429802894592, -0.8118202090263367, 0.5929500460624695, 0.5440924167633057, 0.25727325677871704, 0.16442662477493286, -0.13811402022838593, -0.37594491243362427, -0.16815225780010223, -0.4419262111186981, -0.23505175113677979, 1.2490981817245483, 0.3100217878818512, 0.7591764330863953, 0.12532691657543182, 0.8996919393539429, 0.03382211923599243, -0.1086188405752182, -0.5682005286216736, 0.65921550989151, 0.15180641412734985, -0.8027288317680359, -0.41838356852531433, -0.5322173237800598, -1.0842324495315552, 0.3829796016216278, -0.16098913550376892, -0.9030532836914062, 0.12865637242794037, 0.030349930748343468, -0.24249285459518433, 0.522875189781189, -0.5647764205932617, 0.8461374044418335, -0.13773687183856964, -0.5016719102859497, 0.15894928574562073, -0.8466558456420898, 0.439691424369812, 0.20985235273838043, 0.24642224609851837, 0.06003541871905327, 0.26643437147140503, 1.2261091470718384, -0.8657439351081848, 0.46187612414360046, 0.02741360291838646, 0.010801785625517368, 0.40323346853256226, -0.1754368394613266, 0.5544529557228088, 0.15586566925048828, -0.024310356006026268, -0.13333775103092194, 0.2789284586906433, -0.8676335215568542, -0.022139642387628555, 0.9183636903762817, -1.0400902032852173, -0.5634863972663879, -0.9167699217796326, -0.4822264611721039, 0.018978236243128777, 0.5637069940567017, 0.4102185070514679, 0.5153256058692932, 0.017115609720349312, 0.4696387052536011, 0.8756828904151917, -0.14574529230594635, 0.5928947925567627, 0.2154301404953003, 0.08618462085723877, -0.6698967218399048, 0.8853836059570312, 0.07789589464664459, 0.37916335463523865, 0.23668189346790314, 0.4147835373878479, -0.588538408279419, -0.2372322529554367, -0.1671709567308426, 0.545376718044281, -0.6627516150474548, -0.3008146584033966, -0.38789454102516174, -0.404215931892395, -0.7772113084793091, -0.6316944360733032, -0.31448861956596375, -0.5214206576347351, -0.5369941592216492, -0.5117520093917847, 0.6174352169036865, 0.4568030536174774, -0.40584972500801086, 0.12341449409723282, -0.4274992048740387, 0.3251323103904724, 0.4036254286766052, 0.608750581741333, -0.4065219461917877, -0.6363348960876465, 0.11120130121707916, -0.15502002835273743, -0.5117220878601074, -0.9032121300697327, 0.27430546283721924, -0.05204670876264572, 0.48578307032585144, 0.6125601530075073, 0.03019428625702858, 0.8735280632972717, -0.17770229279994965, 1.0291824340820312, 0.3952997326850891, -0.7740424275398254, 0.7283890843391418, -0.315774142742157, 0.19051265716552734, 0.6834849119186401, 0.1253422498703003, -0.20525839924812317, -0.6102249622344971, -1.2784461975097656, -0.814103364944458, 0.7282743453979492, 0.3775098919868469, -0.2526280879974365, 0.036795876920223236, 0.13738110661506653, -0.2736038565635681, -0.2060336172580719, -0.7348324060440063, -0.9088924527168274, -0.2250576615333557, -0.49879974126815796, 0.1051209419965744, 0.025286996737122536, -0.3781716227531433, -0.7876884937286377, 0.8905563950538635, -0.018812911584973335, 0.58367520570755, 0.4429225027561188, 0.05875413119792938, 0.052520450204610825, 0.4611343741416931, 0.9615541696548462, 0.7277616262435913, -0.5276500582695007, 0.39986786246299744, 0.362476646900177, -1.0689752101898193, 0.4308582842350006, 0.33441343903541565, -0.05979127064347267, -0.06527815014123917, 0.48106086254119873, 0.42467695474624634, -0.030430426821112633, -0.1740579456090927, 0.6213831901550293, -0.04060965031385422, -0.6255667805671692, -0.3686677813529968, 0.042173340916633606, -0.05971244350075722, -0.0016634895000606775, 0.38321346044540405, 
-0.17892523109912872, -0.11005847901105881, -0.5209881067276001, 0.4366109371185303, 0.38869762420654297, -0.45817914605140686, -0.2072758674621582, 0.745582640171051, -0.1909470111131668, -0.09999395161867142, 0.2847636938095093, -0.20263762772083282, -0.5903251767158508, 1.1043479442596436, 0.6499303579330444, 0.6111494898796082, -0.2649375796318054, -0.08713915199041367, 0.9626853466033936, 0.36964935064315796, -0.07787228375673294, 0.5745494961738586, 0.3758675754070282, -0.2623305916786194, 0.21709270775318146, -0.8649221062660217, 0.04596644639968872, 0.23452253639698029, -0.8023238778114319, 0.29393553733825684, -0.6071155667304993, -0.23816612362861633, 0.02648506872355938, 0.421321302652359, -0.4615974426269531, 0.5356481075286865, -0.39410021901130676, 1.2230021953582764, -0.9792075753211975, 0.6859826445579529, 0.7478853464126587, -0.5306353569030762, -1.0381335020065308, -0.5799620747566223, 0.00023202550073619932, -0.8498736619949341, 0.5877960324287415, -0.0725800171494484, 0.17301765084266663, -0.08931881189346313, -0.7420556545257568, -0.8937435150146484, 1.427253007888794, -0.0698067843914032, -0.42732831835746765, 0.2737049460411072, 0.008383617736399174, 0.4499189853668213, 0.15842075645923615, 0.5773386359214783, 0.7485494017601013, 0.8220999240875244, -0.03464816138148308, -0.7195696830749512, 0.28101062774658203, -0.4703361690044403, -0.36736392974853516, 0.4720152020454407, -0.9505936503410339, 1.229175090789795, -0.008713781833648682, 0.20680147409439087, -0.1279422640800476, 0.6841323375701904, 0.7698817849159241, 0.26363807916641235, 0.36524003744125366, 0.9329552054405212, 0.866328775882721, -0.5038667917251587, 0.9413689970970154, -0.17886576056480408, 0.8508442044258118, 0.7487996220588684, 0.17173954844474792, 0.7685147523880005, 0.6948633790016174, -0.546172559261322, 0.5237888097763062, 0.7800623774528503, -0.2610505521297455, 0.41396233439445496, 0.24482783675193787, -0.12339804321527481, -0.13446645438671112, 0.4292648136615753, -0.9176374077796936, 0.13309578597545624, 0.09083492308855057, -0.3010895252227783, 0.05685838311910629, -0.48769611120224, 0.29820406436920166, -0.044695887714624405, -0.07646314054727554, 0.3680330216884613, 0.03409964218735695, -0.3627583384513855, 0.9172217845916748, -0.11395996063947678, 0.7730743288993835, -0.556336522102356, -0.04804166406393051, -0.39954182505607605, 0.594219982624054, -0.43290427327156067, -1.0925750732421875, 0.10596662759780884, 0.04862916097044945, -0.0919332206249237, -0.1351981908082962, 0.6897497177124023, -0.1790827065706253, -0.7805577516555786, 0.13557098805904388, 0.06853743642568588, 0.04980209097266197, 0.5509633421897888, -0.6748367547988892, -0.320563405752182, -0.06643068045377731, -0.526028573513031, 0.11849687248468399, 0.33918580412864685, 0.27504464983940125, 0.5262771248817444, 0.6335667967796326, 0.1742914468050003, 0.43184807896614075, -0.5275675654411316, 0.7950345277786255, -1.086116075515747, -0.7642868757247925, -0.90581876039505, 0.4467446506023407, -0.3353827893733978, -0.866852879524231, 0.9741465449333191, 1.0494003295898438, 0.8619766235351562, 0.016229309141635895, 0.6541205048561096, -0.39063510298728943, 0.3161962032318115, -0.39866378903388977, 0.9355915784835815, -0.8176551461219788, -0.24708151817321777, -0.24921472370624542, -0.7010830640792847, -0.39812180399894714, 0.8760128021240234, -0.20769095420837402, 0.07090986520051956, 1.083776593208313, 0.6178176403045654, -0.14780397713184357, 0.02284282259643078, -0.07493190467357635, 0.5671750903129578, 
0.3740844428539276, 1.041521430015564, 0.6709750294685364, -0.8227805495262146, 0.3512245714664459, -0.4676409661769867, -0.38864296674728394, -0.41650959849357605, -0.4815353751182556, -0.9337564706802368, -0.47426751255989075, -0.20366884768009186, -0.6424597501754761, -0.18154633045196533, 0.9762501120567322, 0.45202285051345825, -0.9191458225250244, -0.42543914914131165, -0.06618423759937286, 0.10310423374176025, -0.5447896718978882, -0.4066816568374634, 0.8057844042778015, -0.06165478006005287, -0.5091062784194946, 0.1545126587152481, -0.14173085987567902, 0.2885156571865082, 0.15425875782966614, -0.4334874451160431, -0.7399092316627502, -0.003645752789452672, 0.4345303177833557, 0.39318129420280457, -0.6352270841598511, -0.7227916717529297, 0.27049577236175537, -0.5259590148925781, 0.47629937529563904, -0.030983928591012955, -0.5545920729637146, 0.06921744346618652, 0.6963244080543518, 0.4790751338005066, 0.6609668731689453, -0.019436253234744072, -0.021297423169016838, -0.6597412824630737, 0.2757284939289093, -0.029370740056037903, 0.2597997784614563, -0.008315157145261765, -0.2538453936576843, 0.7373450994491577, 0.7134138941764832, -0.4672534167766571, -1.0672874450683594, -0.45030200481414795, -1.4866708517074585, -0.001424762886017561, 1.0737667083740234, 0.016205545514822006, -0.5354843139648438, 0.22468213737010956, -0.12962426245212555, 0.15973228216171265, -0.3298131823539734, 0.7421320676803589, 0.7455059885978699, -0.33846867084503174, 0.15806250274181366, -0.650601327419281, 0.37735670804977417, 0.5074166655540466, -1.2326009273529053, -0.10264017432928085, 0.1955960988998413, 0.33015429973602295, 0.383942186832428, 0.6346434354782104, -0.12264905869960785, 0.26178282499313354, 0.2271663397550583, -0.008391243405640125, 0.031615082174539566, 0.09732551127672195, -0.22398462891578674, 0.09351611137390137, -0.19734489917755127, -0.46778327226638794 ]
open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v4-7B
open-llm-leaderboard
2023-08-29T15:17:23Z
201
0
[ "region:us" ]
null
2023-08-29T15:16:23Z
--- pretty_name: Evaluation run of xzuyn/LLaMa-2-PeanutButter_v4-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [xzuyn/LLaMa-2-PeanutButter_v4-7B](https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v4-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v4-7B\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T15:15:59.631802](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v4-7B/blob/main/results_2023-08-29T15%3A15%3A59.631802.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.4754535953456773,\n \"\ acc_stderr\": 0.03543074449128995,\n \"acc_norm\": 0.4793512530654778,\n\ \ \"acc_norm_stderr\": 0.03541409593269912,\n \"mc1\": 0.26805385556915545,\n\ \ \"mc1_stderr\": 0.015506204722834557,\n \"mc2\": 0.42310904021377665,\n\ \ \"mc2_stderr\": 0.015624011969941223\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.507679180887372,\n \"acc_stderr\": 0.014609667440892567,\n\ \ \"acc_norm\": 0.5486348122866894,\n \"acc_norm_stderr\": 0.014542104569955265\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6188010356502689,\n\ \ \"acc_stderr\": 0.004846886929763466,\n \"acc_norm\": 0.8078072097191794,\n\ \ \"acc_norm_stderr\": 0.003932184843841659\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.046482319871173156,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.046482319871173156\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45925925925925926,\n\ \ \"acc_stderr\": 0.04304979692464243,\n \"acc_norm\": 0.45925925925925926,\n\ \ \"acc_norm_stderr\": 0.04304979692464243\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.4276315789473684,\n \"acc_stderr\": 0.040260970832965585,\n\ \ \"acc_norm\": 0.4276315789473684,\n \"acc_norm_stderr\": 0.040260970832965585\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.47,\n\ \ \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \ \ \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.4867924528301887,\n \"acc_stderr\": 0.030762134874500482,\n\ \ \"acc_norm\": 0.4867924528301887,\n \"acc_norm_stderr\": 0.030762134874500482\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5,\n\ \ \"acc_stderr\": 0.04181210050035455,\n \"acc_norm\": 0.5,\n \ \ \"acc_norm_stderr\": 0.04181210050035455\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\"\ : 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.43352601156069365,\n\ \ \"acc_stderr\": 0.03778621079092056,\n \"acc_norm\": 0.43352601156069365,\n\ \ \"acc_norm_stderr\": 0.03778621079092056\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617747,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617747\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \"acc_norm\": 0.58,\n\ \ \"acc_norm_stderr\": 0.04960449637488583\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.03227834510146267,\n\ \ \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.03227834510146267\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n\ \ \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n\ \ \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.041443118108781506,\n\ \ \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.041443118108781506\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.30158730158730157,\n \"acc_stderr\": 0.023636975996101796,\n \"\ acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.023636975996101796\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.29365079365079366,\n\ \ \"acc_stderr\": 0.04073524322147126,\n \"acc_norm\": 0.29365079365079366,\n\ \ \"acc_norm_stderr\": 0.04073524322147126\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5032258064516129,\n\ \ \"acc_stderr\": 0.028443414226438316,\n \"acc_norm\": 0.5032258064516129,\n\ \ \"acc_norm_stderr\": 0.028443414226438316\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.3694581280788177,\n \"acc_stderr\": 0.03395970381998573,\n\ \ \"acc_norm\": 0.3694581280788177,\n \"acc_norm_stderr\": 0.03395970381998573\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\"\ : 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6242424242424243,\n \"acc_stderr\": 0.03781887353205982,\n\ \ \"acc_norm\": 0.6242424242424243,\n \"acc_norm_stderr\": 0.03781887353205982\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.5909090909090909,\n \"acc_stderr\": 0.03502975799413007,\n \"\ acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.03502975799413007\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7202072538860104,\n \"acc_stderr\": 0.032396370467357036,\n\ \ \"acc_norm\": 0.7202072538860104,\n \"acc_norm_stderr\": 0.032396370467357036\n\ \ },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4666666666666667,\n \"acc_stderr\": 0.025294608023986476,\n\ \ \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.025294608023986476\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2814814814814815,\n \"acc_stderr\": 0.027420019350945287,\n \ \ \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.027420019350945287\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.4579831932773109,\n \"acc_stderr\": 0.03236361111951941,\n \ \ \"acc_norm\": 0.4579831932773109,\n \"acc_norm_stderr\": 0.03236361111951941\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"\ acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.6385321100917432,\n \"acc_stderr\": 0.020598082009937374,\n \"\ acc_norm\": 0.6385321100917432,\n \"acc_norm_stderr\": 0.020598082009937374\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.375,\n \"acc_stderr\": 0.033016908987210894,\n \"acc_norm\": 0.375,\n\ \ \"acc_norm_stderr\": 0.033016908987210894\n },\n \"harness|hendrycksTest-high_school_us_history|5\"\ : {\n \"acc\": 0.5686274509803921,\n \"acc_stderr\": 0.03476099060501636,\n\ \ \"acc_norm\": 0.5686274509803921,\n \"acc_norm_stderr\": 0.03476099060501636\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.5949367088607594,\n \"acc_stderr\": 0.03195514741370671,\n \ \ \"acc_norm\": 0.5949367088607594,\n \"acc_norm_stderr\": 0.03195514741370671\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5381165919282511,\n\ \ \"acc_stderr\": 0.03346015011973228,\n \"acc_norm\": 0.5381165919282511,\n\ \ \"acc_norm_stderr\": 0.03346015011973228\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5114503816793893,\n \"acc_stderr\": 0.043841400240780176,\n\ \ \"acc_norm\": 0.5114503816793893,\n \"acc_norm_stderr\": 0.043841400240780176\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5619834710743802,\n \"acc_stderr\": 0.04529146804435792,\n \"\ acc_norm\": 0.5619834710743802,\n \"acc_norm_stderr\": 0.04529146804435792\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4722222222222222,\n\ \ \"acc_stderr\": 0.04826217294139894,\n \"acc_norm\": 0.4722222222222222,\n\ \ \"acc_norm_stderr\": 0.04826217294139894\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.49693251533742333,\n \"acc_stderr\": 0.03928297078179663,\n\ \ \"acc_norm\": 0.49693251533742333,\n \"acc_norm_stderr\": 0.03928297078179663\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n\ \ \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \ \ \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6019417475728155,\n \"acc_stderr\": 0.04846748253977239,\n\ \ \"acc_norm\": 0.6019417475728155,\n \"acc_norm_stderr\": 0.04846748253977239\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.688034188034188,\n\ \ \"acc_stderr\": 0.030351527323344948,\n \"acc_norm\": 0.688034188034188,\n\ \ \"acc_norm_stderr\": 0.030351527323344948\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 
0.050251890762960605\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6155810983397191,\n\ \ \"acc_stderr\": 0.01739568874281962,\n \"acc_norm\": 0.6155810983397191,\n\ \ \"acc_norm_stderr\": 0.01739568874281962\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.476878612716763,\n \"acc_stderr\": 0.026890297881303128,\n\ \ \"acc_norm\": 0.476878612716763,\n \"acc_norm_stderr\": 0.026890297881303128\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2994413407821229,\n\ \ \"acc_stderr\": 0.015318257745976708,\n \"acc_norm\": 0.2994413407821229,\n\ \ \"acc_norm_stderr\": 0.015318257745976708\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5261437908496732,\n \"acc_stderr\": 0.028590752958852387,\n\ \ \"acc_norm\": 0.5261437908496732,\n \"acc_norm_stderr\": 0.028590752958852387\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5787781350482315,\n\ \ \"acc_stderr\": 0.02804339985821063,\n \"acc_norm\": 0.5787781350482315,\n\ \ \"acc_norm_stderr\": 0.02804339985821063\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5061728395061729,\n \"acc_stderr\": 0.027818623962583295,\n\ \ \"acc_norm\": 0.5061728395061729,\n \"acc_norm_stderr\": 0.027818623962583295\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3723404255319149,\n \"acc_stderr\": 0.02883892147125146,\n \ \ \"acc_norm\": 0.3723404255319149,\n \"acc_norm_stderr\": 0.02883892147125146\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.36766623207301175,\n\ \ \"acc_stderr\": 0.012314845910071691,\n \"acc_norm\": 0.36766623207301175,\n\ \ \"acc_norm_stderr\": 0.012314845910071691\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5367647058823529,\n \"acc_stderr\": 0.030290619180485694,\n\ \ \"acc_norm\": 0.5367647058823529,\n \"acc_norm_stderr\": 0.030290619180485694\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.434640522875817,\n \"acc_stderr\": 0.02005426920072646,\n \ \ \"acc_norm\": 0.434640522875817,\n \"acc_norm_stderr\": 0.02005426920072646\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.509090909090909,\n\ \ \"acc_stderr\": 0.04788339768702861,\n \"acc_norm\": 0.509090909090909,\n\ \ \"acc_norm_stderr\": 0.04788339768702861\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.4897959183673469,\n \"acc_stderr\": 0.03200255347893783,\n\ \ \"acc_norm\": 0.4897959183673469,\n \"acc_norm_stderr\": 0.03200255347893783\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6169154228855721,\n\ \ \"acc_stderr\": 0.0343751933733825,\n \"acc_norm\": 0.6169154228855721,\n\ \ \"acc_norm_stderr\": 0.0343751933733825\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \ \ \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42168674698795183,\n\ \ \"acc_stderr\": 0.03844453181770917,\n \"acc_norm\": 0.42168674698795183,\n\ \ \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7076023391812866,\n \"acc_stderr\": 0.03488647713457923,\n\ \ \"acc_norm\": 0.7076023391812866,\n \"acc_norm_stderr\": 0.03488647713457923\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.26805385556915545,\n\ \ \"mc1_stderr\": 0.015506204722834557,\n \"mc2\": 0.42310904021377665,\n\ \ \"mc2_stderr\": 0.015624011969941223\n 
}\n}\n```" repo_url: https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v4-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|arc:challenge|25_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hellaswag|10_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:15:59.631802.parquet' - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:15:59.631802.parquet' - 
'**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:15:59.631802.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:15:59.631802.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:15:59.631802.parquet' - config_name: 
harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - 
split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:15:59.631802.parquet' - split: 
latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:15:59.631802.parquet' - config_name: 
harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:15:59.631802.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T15_15_59.631802 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T15:15:59.631802.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T15:15:59.631802.parquet' - config_name: results data_files: - split: 2023_08_29T15_15_59.631802 path: - results_2023-08-29T15:15:59.631802.parquet - split: latest path: - results_2023-08-29T15:15:59.631802.parquet --- # Dataset Card for Evaluation run of xzuyn/LLaMa-2-PeanutButter_v4-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v4-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [xzuyn/LLaMa-2-PeanutButter_v4-7B](https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v4-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
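The aggregated metrics mentioned above live in the "results" configuration. As a minimal sketch (assuming the `datasets` library is installed, and using the configuration and split names declared in the YAML frontmatter above), they can be loaded directly:

```python
from datasets import load_dataset

# Aggregated metrics for the most recent evaluation run: the "results"
# configuration exposes a "latest" split alongside the timestamped split,
# as declared in the YAML frontmatter above.
results = load_dataset(
    "open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v4-7B",
    "results",
    split="latest",
)
```
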
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v4-7B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T15:15:59.631802](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v4-7B/blob/main/results_2023-08-29T15%3A15%3A59.631802.json): ```python { "all": { "acc": 0.4754535953456773, "acc_stderr": 0.03543074449128995, "acc_norm": 0.4793512530654778, "acc_norm_stderr": 0.03541409593269912, "mc1": 0.26805385556915545, "mc1_stderr": 0.015506204722834557, "mc2": 0.42310904021377665, "mc2_stderr": 0.015624011969941223 }, "harness|arc:challenge|25": { "acc": 0.507679180887372, "acc_stderr": 0.014609667440892567, "acc_norm": 0.5486348122866894, "acc_norm_stderr": 0.014542104569955265 }, "harness|hellaswag|10": { "acc": 0.6188010356502689, "acc_stderr": 0.004846886929763466, "acc_norm": 0.8078072097191794, "acc_norm_stderr": 0.003932184843841659 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45925925925925926, "acc_stderr": 0.04304979692464243, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464243 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4276315789473684, "acc_stderr": 0.040260970832965585, "acc_norm": 0.4276315789473684, "acc_norm_stderr": 0.040260970832965585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4867924528301887, "acc_stderr": 0.030762134874500482, "acc_norm": 0.4867924528301887, "acc_norm_stderr": 0.030762134874500482 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.43352601156069365, "acc_stderr": 0.03778621079092056, "acc_norm": 0.43352601156069365, "acc_norm_stderr": 0.03778621079092056 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146267, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4482758620689655, "acc_stderr": 
0.041443118108781506, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.041443118108781506 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30158730158730157, "acc_stderr": 0.023636975996101796, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.023636975996101796 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.29365079365079366, "acc_stderr": 0.04073524322147126, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147126 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5032258064516129, "acc_stderr": 0.028443414226438316, "acc_norm": 0.5032258064516129, "acc_norm_stderr": 0.028443414226438316 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3694581280788177, "acc_stderr": 0.03395970381998573, "acc_norm": 0.3694581280788177, "acc_norm_stderr": 0.03395970381998573 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6242424242424243, "acc_stderr": 0.03781887353205982, "acc_norm": 0.6242424242424243, "acc_norm_stderr": 0.03781887353205982 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413007, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413007 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7202072538860104, "acc_stderr": 0.032396370467357036, "acc_norm": 0.7202072538860104, "acc_norm_stderr": 0.032396370467357036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4666666666666667, "acc_stderr": 0.025294608023986476, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.025294608023986476 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945287, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945287 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4579831932773109, "acc_stderr": 0.03236361111951941, "acc_norm": 0.4579831932773109, "acc_norm_stderr": 0.03236361111951941 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6385321100917432, "acc_stderr": 0.020598082009937374, "acc_norm": 0.6385321100917432, "acc_norm_stderr": 0.020598082009937374 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5686274509803921, "acc_stderr": 0.03476099060501636, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.03476099060501636 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5949367088607594, "acc_stderr": 0.03195514741370671, "acc_norm": 0.5949367088607594, "acc_norm_stderr": 0.03195514741370671 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5381165919282511, "acc_stderr": 0.03346015011973228, "acc_norm": 0.5381165919282511, "acc_norm_stderr": 0.03346015011973228 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 
0.5114503816793893, "acc_norm_stderr": 0.043841400240780176 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5619834710743802, "acc_stderr": 0.04529146804435792, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.04529146804435792 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.04826217294139894 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.49693251533742333, "acc_stderr": 0.03928297078179663, "acc_norm": 0.49693251533742333, "acc_norm_stderr": 0.03928297078179663 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.6019417475728155, "acc_stderr": 0.04846748253977239, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.04846748253977239 }, "harness|hendrycksTest-marketing|5": { "acc": 0.688034188034188, "acc_stderr": 0.030351527323344948, "acc_norm": 0.688034188034188, "acc_norm_stderr": 0.030351527323344948 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6155810983397191, "acc_stderr": 0.01739568874281962, "acc_norm": 0.6155810983397191, "acc_norm_stderr": 0.01739568874281962 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.476878612716763, "acc_stderr": 0.026890297881303128, "acc_norm": 0.476878612716763, "acc_norm_stderr": 0.026890297881303128 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2994413407821229, "acc_stderr": 0.015318257745976708, "acc_norm": 0.2994413407821229, "acc_norm_stderr": 0.015318257745976708 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5261437908496732, "acc_stderr": 0.028590752958852387, "acc_norm": 0.5261437908496732, "acc_norm_stderr": 0.028590752958852387 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5787781350482315, "acc_stderr": 0.02804339985821063, "acc_norm": 0.5787781350482315, "acc_norm_stderr": 0.02804339985821063 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5061728395061729, "acc_stderr": 0.027818623962583295, "acc_norm": 0.5061728395061729, "acc_norm_stderr": 0.027818623962583295 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3723404255319149, "acc_stderr": 0.02883892147125146, "acc_norm": 0.3723404255319149, "acc_norm_stderr": 0.02883892147125146 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.36766623207301175, "acc_stderr": 0.012314845910071691, "acc_norm": 0.36766623207301175, "acc_norm_stderr": 0.012314845910071691 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5367647058823529, "acc_stderr": 0.030290619180485694, "acc_norm": 0.5367647058823529, "acc_norm_stderr": 0.030290619180485694 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.434640522875817, "acc_stderr": 0.02005426920072646, "acc_norm": 0.434640522875817, "acc_norm_stderr": 0.02005426920072646 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4897959183673469, "acc_stderr": 0.03200255347893783, "acc_norm": 0.4897959183673469, "acc_norm_stderr": 0.03200255347893783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6169154228855721, "acc_stderr": 
0.0343751933733825, "acc_norm": 0.6169154228855721, "acc_norm_stderr": 0.0343751933733825 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.03844453181770917, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.03844453181770917 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7076023391812866, "acc_stderr": 0.03488647713457923, "acc_norm": 0.7076023391812866, "acc_norm_stderr": 0.03488647713457923 }, "harness|truthfulqa:mc|0": { "mc1": 0.26805385556915545, "mc1_stderr": 0.015506204722834557, "mc2": 0.42310904021377665, "mc2_stderr": 0.015624011969941223 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.6896567940711975, -0.8267604112625122, 0.26311662793159485, 0.2595573365688324, -0.1783888339996338, -0.001154544996097684, 0.030967524275183678, -0.27164793014526367, 0.6291429400444031, -0.10737017542123795, -0.49787774682044983, -0.6838284730911255, -0.4665459096431732, 0.21588024497032166, -0.061765823513269424, 0.8312932252883911, -0.19662368297576904, -0.1887308806180954, 0.09959924221038818, 0.026411233469843864, -0.24812421202659607, -0.35275501012802124, -0.5048357844352722, -0.3429112732410431, 0.14951325953006744, 0.4530719816684723, 0.5026569962501526, 0.8624668717384338, 0.673237681388855, 0.3051477074623108, -0.3472413122653961, -0.09086303412914276, -0.21297131478786469, -0.28167226910591125, 0.37987345457077026, -0.3873533606529236, -0.8410894870758057, 0.3121255040168762, 0.776906430721283, 0.6722972393035889, -0.09731494635343552, 0.31683477759361267, 0.012312645092606544, 0.5154978036880493, -0.279911607503891, 0.052565496414899826, -0.3118078410625458, 0.27490490674972534, -0.21477417647838593, -0.303404837846756, -0.24432601034641266, -0.2740427851676941, -0.10012539476156235, -0.9245771765708923, 0.26458460092544556, 0.3599541485309601, 1.6440117359161377, -0.1600920855998993, -0.23726500570774078, 0.09618322551250458, -0.07308981567621231, 1.0281145572662354, -0.8360118865966797, 0.32445892691612244, 0.783454418182373, 0.14444412291049957, -0.19377408921718597, -0.5940677523612976, -0.6672786474227905, 0.09616218507289886, -0.3646884858608246, 0.33933138847351074, -0.10122007131576538, -0.2211885005235672, 0.36473342776298523, 0.6852672696113586, -0.6463729739189148, 0.16222910583019257, -0.6359177231788635, -0.11945424973964691, 1.0513639450073242, 0.37992051243782043, 0.04600389674305916, -0.3932647109031677, -0.7097628712654114, -0.6371872425079346, -0.39706751704216003, 0.28359612822532654, 0.4287065863609314, 0.33920907974243164, -0.41969606280326843, 0.6715872883796692, -0.4394296407699585, 0.5144271850585938, 0.4455922245979309, -0.0029386754613369703, 0.9312286376953125, -0.6871898174285889, -0.5380875468254089, -0.02862664870917797, 1.1254395246505737, 0.6274815201759338, 0.09635179489850998, 0.2218359261751175, 0.06555859744548798, -0.08853279054164886, -0.025393592193722725, -0.8899719715118408, -0.3132466971874237, 0.1786397397518158, -0.38199806213378906, -0.574669361114502, 0.3303297758102417, -0.8691349029541016, 0.14079885184764862, 0.03371511399745941, 0.39984971284866333, -0.4597490131855011, -0.14343619346618652, 0.2987613081932068, -0.42644357681274414, 0.8170832991600037, -0.1952880620956421, -0.8445903658866882, 0.42429569363594055, 0.49481385946273804, 0.7711187601089478, -0.05714412033557892, -0.40023285150527954, -0.04377341642975807, -0.06583595275878906, -0.2852872908115387, 0.584102988243103, -0.2711796164512634, -0.4794235825538635, -0.335704505443573, 0.314726859331131, -0.23219463229179382, -0.35070139169692993, 0.7356906533241272, -0.20356835424900055, 0.15329396724700928, -0.4640788733959198, -0.6269264221191406, 0.11703628301620483, 0.4143720269203186, -0.3947455585002899, 1.3109588623046875, 0.26158756017684937, -0.8377670645713806, 0.4833225905895233, -0.5379200577735901, -0.1982899308204651, -0.05010939761996269, -0.10900424420833588, -0.8452473878860474, -0.2900655269622803, 0.19940988719463348, 0.42578306794166565, -0.17970259487628937, -0.1347387433052063, -0.42101946473121643, -0.3733575642108917, 0.3948248028755188, -0.18861818313598633, 1.2392817735671997, -0.09738773107528687, -0.7798783779144287, -0.1320640593767166, 
-1.2845228910446167, 0.32827627658843994, 0.31807827949523926, -0.39142367243766785, -0.12642839550971985, -0.4846021831035614, -0.04123525321483612, 0.1947963535785675, 0.2551153898239136, -0.807858943939209, 0.2721586227416992, -0.3579882085323334, 0.17250294983386993, 1.317710280418396, 0.014295177534222603, 0.12577129900455475, -0.5719179511070251, 0.5345470309257507, 0.2330709546804428, 0.15610873699188232, 0.38283565640449524, -0.6371697783470154, -0.8066399693489075, -0.48605799674987793, -0.031586747616529465, 0.618175745010376, -0.12051723152399063, 1.1620776653289795, 0.027555886656045914, -0.8886998295783997, -0.4608440697193146, -0.14705225825309753, 0.42785799503326416, 0.834080159664154, 0.5755399465560913, -0.024631058797240257, -0.6772873401641846, -1.0592180490493774, -0.2030528485774994, -0.1495247781276703, 0.17169761657714844, 0.2237626165151596, 0.9938216209411621, -0.22418974339962006, 0.6342138648033142, -1.0369877815246582, -0.16756242513656616, 0.1534295380115509, -0.13303810358047485, 0.8118154406547546, 0.7666773796081543, 0.6491973400115967, -0.6125264167785645, -0.5662919878959656, 0.22111400961875916, -0.9366382956504822, -0.09718998521566391, 0.11450894176959991, -0.33241018652915955, 0.04243645817041397, 0.10948466509580612, -0.719003438949585, 0.5812118053436279, 0.25872281193733215, -1.0852762460708618, 1.060636281967163, -0.28362929821014404, 0.5570051074028015, -1.0152686834335327, 0.17253610491752625, -0.09113382548093796, -0.015599100850522518, -0.4741743505001068, -0.002553126774728298, 0.051796525716781616, 0.4578894376754761, -0.45889225602149963, 0.781315803527832, -0.6848657131195068, -0.06540980190038681, 0.42116186022758484, 0.15771125257015228, -0.09475238621234894, 0.3458119332790375, -0.18627727031707764, 0.7853530645370483, 0.7991712689399719, -0.43904057145118713, 0.5436630249023438, 0.43699195981025696, -0.2607523798942566, 0.7558265924453735, -0.5329742431640625, -0.3438495397567749, 0.2891896069049835, -0.0265819001942873, -0.9012388586997986, -0.4658176600933075, 0.11021952331066132, -0.624401867389679, -0.1214771419763565, 0.37924835085868835, -0.27283990383148193, -0.8319615721702576, -0.9812468886375427, 0.3262336552143097, 0.7071546912193298, -0.453916996717453, -0.2180739939212799, 0.04722442477941513, 0.0728740468621254, -0.8178633451461792, -0.8875373601913452, -0.4863961637020111, -0.21339188516139984, -0.7310642004013062, 0.31524044275283813, -0.2615293562412262, -0.23840373754501343, -0.06481848657131195, -0.26706141233444214, -0.2919808626174927, 0.028044434264302254, 0.15628379583358765, 0.6459087133407593, -0.44856783747673035, -0.316554456949234, -0.16817829012870789, -0.2274709939956665, 0.25141751766204834, -0.10866623371839523, 0.3907448649406433, -0.4132923185825348, -0.37017256021499634, -0.46022874116897583, 0.020551195368170738, 0.6260464787483215, -0.021050790324807167, 0.7371714115142822, 0.4363309144973755, -0.3051655888557434, 0.040595538914203644, -0.23878182470798492, -0.2692834734916687, -0.5952383279800415, 0.28505057096481323, -0.5507394075393677, -1.0683966875076294, 0.7770016193389893, 0.5193042755126953, 0.0463910736143589, 1.0847504138946533, 0.611777663230896, -0.33256667852401733, 1.0362578630447388, 0.03920178860425949, 0.3146499991416931, 0.3680720925331116, -0.6913129687309265, 0.08653556555509567, -0.9354516863822937, -0.3241594135761261, -0.5637703537940979, -0.5173081159591675, -0.6859784722328186, -0.0820060521364212, 0.24823427200317383, 0.14629362523555756, -0.6772225499153137, 
0.5708246827125549, -0.8240074515342712, 0.6169763207435608, 0.5409127473831177, 0.27020326256752014, 0.18929347395896912, -0.12604765594005585, -0.4133521616458893, -0.16139480471611023, -0.45547640323638916, -0.24695761501789093, 1.241787314414978, 0.29912346601486206, 0.7562960982322693, 0.09796976298093796, 0.8622342348098755, 0.03812631592154503, -0.07663855701684952, -0.5769170522689819, 0.6422394514083862, 0.1978498250246048, -0.8428441286087036, -0.3650604784488678, -0.5078040957450867, -1.1045105457305908, 0.4600312411785126, -0.16664427518844604, -0.9025656580924988, 0.13175778090953827, 0.04737270623445511, -0.23575077950954437, 0.5064966082572937, -0.5289581418037415, 0.8384708762168884, -0.14610262215137482, -0.4674384891986847, 0.11543191224336624, -0.8525156378746033, 0.4678303599357605, 0.21945583820343018, 0.21253107488155365, -0.02281528152525425, 0.22583240270614624, 1.1664552688598633, -0.8379174470901489, 0.4815063178539276, 0.1038767546415329, 0.024661893025040627, 0.3176962435245514, -0.16827239096164703, 0.5335080623626709, 0.1494111567735672, -0.04276667535305023, -0.10525032132863998, 0.2558632791042328, -0.876262366771698, -0.040237877517938614, 0.8903674483299255, -1.0045057535171509, -0.6446030139923096, -0.9385520219802856, -0.5015193819999695, 0.08676942437887192, 0.5758140087127686, 0.3835613429546356, 0.44829943776130676, 0.0017818092601373792, 0.4417249858379364, 0.8604502081871033, -0.08762058615684509, 0.5648155212402344, 0.2205151468515396, 0.05843557044863701, -0.6738389730453491, 0.8476746082305908, 0.07843136042356491, 0.36806395649909973, 0.28827422857284546, 0.40942659974098206, -0.5251927971839905, -0.2013983428478241, -0.23737266659736633, 0.5242981314659119, -0.6462975740432739, -0.3078695833683014, -0.4090840220451355, -0.3738009035587311, -0.7661615610122681, -0.6804069876670837, -0.27644601464271545, -0.5613657832145691, -0.5479825139045715, -0.5489718914031982, 0.6198636293411255, 0.42694637179374695, -0.41088026762008667, 0.1303669661283493, -0.44704291224479675, 0.2455981820821762, 0.32919153571128845, 0.5362350940704346, -0.35225436091423035, -0.606849193572998, 0.0690344050526619, -0.1551770567893982, -0.5647634267807007, -0.9804360866546631, 0.28832706809043884, -0.051243409514427185, 0.4752238392829895, 0.644936203956604, 0.009513934142887592, 0.9046270251274109, -0.2086903601884842, 0.9838663935661316, 0.3442792296409607, -0.8125091791152954, 0.7296843528747559, -0.3115975856781006, 0.16548901796340942, 0.6326425671577454, 0.20586392283439636, -0.23357020318508148, -0.6482927799224854, -1.2828254699707031, -0.796009361743927, 0.6745921969413757, 0.38322052359580994, -0.24193060398101807, 0.025241194292902946, 0.14282646775245667, -0.29076674580574036, -0.14999012649059296, -0.7020145654678345, -0.8966841697692871, -0.17864437401294708, -0.4769604504108429, 0.1599837690591812, 0.02980496548116207, -0.3536642789840698, -0.8522346019744873, 0.8888112306594849, 0.03714783862233162, 0.5784658789634705, 0.42674124240875244, 0.07101895660161972, 0.04209035262465477, 0.43975409865379333, 0.9112675189971924, 0.6981141567230225, -0.47516489028930664, 0.3776068091392517, 0.401845246553421, -1.0578354597091675, 0.5321300029754639, 0.3360896110534668, -0.10340612381696701, -0.04754914715886116, 0.4127700626850128, 0.4029946029186249, 0.09776829183101654, -0.1907951682806015, 0.6293544769287109, -0.03367476910352707, -0.5655546188354492, -0.40591463446617126, 0.06756369769573212, -0.08195232599973679, -0.00937607791274786, 0.35529497265815735, 
-0.17444685101509094, -0.05662209540605545, -0.47941017150878906, 0.4739324748516083, 0.35320258140563965, -0.44147789478302, -0.19644537568092346, 0.7637009024620056, -0.18867436051368713, -0.03427595645189285, 0.32748204469680786, -0.21774619817733765, -0.6112274527549744, 1.1018214225769043, 0.6908883452415466, 0.6209936141967773, -0.2827809453010559, -0.07925250381231308, 0.9162245988845825, 0.40724673867225647, 0.01109953224658966, 0.5423250198364258, 0.2843775451183319, -0.2933559715747833, 0.2281223088502884, -0.8426948189735413, -0.024077177047729492, 0.10090350359678268, -0.8029444217681885, 0.3141807019710541, -0.6145164370536804, -0.22411949932575226, 0.03403112664818764, 0.4340718686580658, -0.43463650345802307, 0.5454853177070618, -0.40831059217453003, 1.2206171751022339, -0.9682443141937256, 0.7136324644088745, 0.7220227718353271, -0.5535966753959656, -1.0154447555541992, -0.5601925849914551, 0.04235515743494034, -0.8761821389198303, 0.5497323870658875, -0.05996856838464737, 0.1912088245153427, -0.0681552141904831, -0.7027355432510376, -0.9730863571166992, 1.4237786531448364, -0.07812720537185669, -0.44999948143959045, 0.23894059658050537, -0.026759006083011627, 0.4021962881088257, 0.16959792375564575, 0.6261401772499084, 0.8034626841545105, 0.8138851523399353, -0.048131514340639114, -0.7174472808837891, 0.3850022554397583, -0.5019659996032715, -0.30594730377197266, 0.44648662209510803, -0.9777418971061707, 1.2126353979110718, -0.033575598150491714, 0.1689440906047821, -0.1571088582277298, 0.723526656627655, 0.8138240575790405, 0.19914114475250244, 0.33989012241363525, 0.9074521660804749, 0.9079276323318481, -0.49961671233177185, 0.982367753982544, -0.1804492175579071, 0.8565896153450012, 0.7041619420051575, 0.1728321611881256, 0.7665351629257202, 0.6590569019317627, -0.573005735874176, 0.5739858150482178, 0.8280680179595947, -0.3249785900115967, 0.3793923258781433, 0.30324724316596985, -0.14660602807998657, -0.12063007056713104, 0.44301366806030273, -0.905163586139679, 0.12076297402381897, 0.13787077367305756, -0.3402895927429199, 0.07584424316883087, -0.47954490780830383, 0.4138872027397156, -0.11345332860946655, -0.002192070707678795, 0.33332350850105286, 0.042114146053791046, -0.31202515959739685, 0.9336666464805603, -0.14250434935092926, 0.816670298576355, -0.5277347564697266, -0.052792854607105255, -0.3717239201068878, 0.603976309299469, -0.4660118520259857, -1.105224370956421, 0.14776909351348877, 0.06765327602624893, -0.1413840502500534, -0.08199473470449448, 0.6744886636734009, -0.17477774620056152, -0.806555449962616, 0.10489946603775024, 0.008335099555552006, 0.10816995054483414, 0.5475839972496033, -0.722511351108551, -0.25873735547065735, -0.06018980219960213, -0.5659557580947876, 0.09861775487661362, 0.3103168308734894, 0.28595927357673645, 0.5692360401153564, 0.6406937837600708, 0.16875478625297546, 0.45887893438339233, -0.5692178606987, 0.8255422115325928, -1.0972130298614502, -0.7450075149536133, -0.9168437719345093, 0.4432510733604431, -0.36226123571395874, -0.8763443827629089, 1.0235272645950317, 1.0272136926651, 0.9352303147315979, 0.04236998409032822, 0.6940735578536987, -0.397541880607605, 0.27085644006729126, -0.3513094484806061, 0.9407723546028137, -0.8246669769287109, -0.258600115776062, -0.2841733396053314, -0.6595036387443542, -0.414352148771286, 0.9135634899139404, -0.17304068803787231, 0.08722084760665894, 1.0554759502410889, 0.6760513186454773, -0.1418190747499466, 0.03340975195169449, -0.053186800330877304, 0.5852893590927124, 
0.40401598811149597, 1.022400975227356, 0.6610840559005737, -0.8277571797370911, 0.3916623890399933, -0.5197418332099915, -0.449900358915329, -0.4408089220523834, -0.46974489092826843, -0.8900841474533081, -0.4242061972618103, -0.20446337759494781, -0.607681930065155, -0.15171338617801666, 1.039066195487976, 0.5001257061958313, -0.9010123014450073, -0.45342642068862915, -0.04525776207447052, 0.15951979160308838, -0.576614260673523, -0.4179745316505432, 0.7153981328010559, -0.07084028422832489, -0.5410972237586975, 0.1654876172542572, -0.12755633890628815, 0.2922322750091553, 0.1397039294242859, -0.37653419375419617, -0.7220233082771301, 0.0181867852807045, 0.4514496624469757, 0.3689379096031189, -0.6791942119598389, -0.6975003480911255, 0.2860885560512543, -0.5133249759674072, 0.510360062122345, -0.0443226583302021, -0.525835394859314, 0.03251677379012108, 0.6813942790031433, 0.48504820466041565, 0.679884672164917, -0.01889216899871826, 0.03663811460137367, -0.6911017894744873, 0.2361610382795334, -0.06165897101163864, 0.2845926582813263, -0.018598167225718498, -0.3272816240787506, 0.7319948673248291, 0.7017070651054382, -0.49345842003822327, -1.0681354999542236, -0.45107904076576233, -1.4702006578445435, 0.011143936775624752, 1.063010334968567, 0.0025055876467376947, -0.5555195212364197, 0.23555131256580353, -0.13203038275241852, 0.1506180316209793, -0.3579660654067993, 0.7499396204948425, 0.7501475214958191, -0.34505927562713623, 0.10153264552354813, -0.6104653477668762, 0.3487016558647156, 0.513215959072113, -1.224013328552246, -0.09716105461120605, 0.18212568759918213, 0.3718269169330597, 0.3396977186203003, 0.6919403076171875, -0.11949966847896576, 0.2951846420764923, 0.30814218521118164, 0.026576964184641838, 0.06197327375411987, 0.0886937603354454, -0.21376867592334747, 0.020647570490837097, -0.22630836069583893, -0.4882052540779114 ]
open-llm-leaderboard/details_IkariDev__Athena-tmp
open-llm-leaderboard
2023-08-29T15:52:05Z
201
0
[ "region:us" ]
null
2023-08-29T15:51:06Z
--- pretty_name: Evaluation run of IkariDev/Athena-tmp dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [IkariDev/Athena-tmp](https://huggingface.co/IkariDev/Athena-tmp) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_IkariDev__Athena-tmp\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T15:50:42.106753](https://huggingface.co/datasets/open-llm-leaderboard/details_IkariDev__Athena-tmp/blob/main/results_2023-08-29T15%3A50%3A42.106753.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.5888874553745688,\n \"\ acc_stderr\": 0.03407664559390293,\n \"acc_norm\": 0.5926858740874733,\n\ \ \"acc_norm_stderr\": 0.034057449595187576,\n \"mc1\": 0.38922888616891066,\n\ \ \"mc1_stderr\": 0.017068552680690328,\n \"mc2\": 0.5536706803409501,\n\ \ \"mc2_stderr\": 0.01611557269809252\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5674061433447098,\n \"acc_stderr\": 0.014478005694182531,\n\ \ \"acc_norm\": 0.5921501706484642,\n \"acc_norm_stderr\": 0.014361097288449696\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6218880701055567,\n\ \ \"acc_stderr\": 0.004839247332606038,\n \"acc_norm\": 0.8212507468631747,\n\ \ \"acc_norm_stderr\": 0.003823591814133031\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5185185185185185,\n\ \ \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.5185185185185185,\n\ \ \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.6381578947368421,\n \"acc_stderr\": 0.03910525752849726,\n\ \ \"acc_norm\": 0.6381578947368421,\n \"acc_norm_stderr\": 0.03910525752849726\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n\ \ \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \ \ \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6075471698113207,\n \"acc_stderr\": 0.030052580579557845,\n\ \ \"acc_norm\": 0.6075471698113207,\n \"acc_norm_stderr\": 0.030052580579557845\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7083333333333334,\n\ \ \"acc_stderr\": 0.03800968060554859,\n \"acc_norm\": 0.7083333333333334,\n\ \ \"acc_norm_stderr\": 0.03800968060554859\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n \ \ },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n\ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5433526011560693,\n\ \ \"acc_stderr\": 0.03798106566014498,\n \"acc_norm\": 0.5433526011560693,\n\ \ \"acc_norm_stderr\": 0.03798106566014498\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n\ \ \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n\ \ \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.502127659574468,\n \"acc_stderr\": 0.03268572658667492,\n\ \ \"acc_norm\": 0.502127659574468,\n \"acc_norm_stderr\": 0.03268572658667492\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n\ \ \"acc_stderr\": 0.043727482902780064,\n \"acc_norm\": 0.3157894736842105,\n\ \ \"acc_norm_stderr\": 0.043727482902780064\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728763,\n\ \ \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728763\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.35185185185185186,\n \"acc_stderr\": 0.024594975128920938,\n \"\ acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.024594975128920938\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n\ \ \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.40476190476190477,\n\ \ \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.6451612903225806,\n \"acc_stderr\": 0.02721888977330876,\n \"\ acc_norm\": 0.6451612903225806,\n \"acc_norm_stderr\": 0.02721888977330876\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.458128078817734,\n \"acc_stderr\": 0.03505630140785742,\n \"acc_norm\"\ : 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785742\n },\n\ \ \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\"\ : 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n\ \ \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.033175059300091805,\n\ \ \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.033175059300091805\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7727272727272727,\n \"acc_stderr\": 0.02985751567338642,\n \"\ acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.02985751567338642\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.025787723180723875,\n\ \ \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.025787723180723875\n\ \ },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6205128205128205,\n \"acc_stderr\": 0.024603626924097417,\n\ \ \"acc_norm\": 0.6205128205128205,\n \"acc_norm_stderr\": 0.024603626924097417\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616258,\n \ \ \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616258\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6134453781512605,\n \"acc_stderr\": 0.03163145807552378,\n \ \ \"acc_norm\": 0.6134453781512605,\n \"acc_norm_stderr\": 0.03163145807552378\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"\ acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7963302752293578,\n \"acc_stderr\": 0.017266742087630804,\n \"\ acc_norm\": 0.7963302752293578,\n \"acc_norm_stderr\": 0.017266742087630804\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4212962962962963,\n \"acc_stderr\": 0.03367462138896078,\n \"\ acc_norm\": 0.4212962962962963,\n \"acc_norm_stderr\": 0.03367462138896078\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8137254901960784,\n \"acc_stderr\": 0.02732547096671632,\n \"\ acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.02732547096671632\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \ \ \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n\ \ \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n\ \ \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6564885496183206,\n \"acc_stderr\": 0.041649760719448786,\n\ \ \"acc_norm\": 0.6564885496183206,\n \"acc_norm_stderr\": 0.041649760719448786\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.71900826446281,\n \"acc_stderr\": 0.041032038305145124,\n \"acc_norm\"\ : 0.71900826446281,\n \"acc_norm_stderr\": 0.041032038305145124\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n\ \ \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n\ \ \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7116564417177914,\n \"acc_stderr\": 0.035590395316173425,\n\ \ \"acc_norm\": 0.7116564417177914,\n \"acc_norm_stderr\": 0.035590395316173425\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.36607142857142855,\n\ \ \"acc_stderr\": 0.0457237235873743,\n \"acc_norm\": 0.36607142857142855,\n\ \ \"acc_norm_stderr\": 0.0457237235873743\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n\ \ \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n\ \ \"acc_stderr\": 0.02514093595033545,\n \"acc_norm\": 0.8205128205128205,\n\ \ \"acc_norm_stderr\": 0.02514093595033545\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \ \ 
\"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7790549169859514,\n\ \ \"acc_stderr\": 0.014836205167333567,\n \"acc_norm\": 0.7790549169859514,\n\ \ \"acc_norm_stderr\": 0.014836205167333567\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6445086705202312,\n \"acc_stderr\": 0.025770292082977247,\n\ \ \"acc_norm\": 0.6445086705202312,\n \"acc_norm_stderr\": 0.025770292082977247\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.44692737430167595,\n\ \ \"acc_stderr\": 0.01662803003964761,\n \"acc_norm\": 0.44692737430167595,\n\ \ \"acc_norm_stderr\": 0.01662803003964761\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.630718954248366,\n \"acc_stderr\": 0.027634176689602656,\n\ \ \"acc_norm\": 0.630718954248366,\n \"acc_norm_stderr\": 0.027634176689602656\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6913183279742765,\n\ \ \"acc_stderr\": 0.026236965881153262,\n \"acc_norm\": 0.6913183279742765,\n\ \ \"acc_norm_stderr\": 0.026236965881153262\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.025630824975621344,\n\ \ \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.025630824975621344\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.450354609929078,\n \"acc_stderr\": 0.02968010556502904,\n \ \ \"acc_norm\": 0.450354609929078,\n \"acc_norm_stderr\": 0.02968010556502904\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46284224250325945,\n\ \ \"acc_stderr\": 0.012734923579532063,\n \"acc_norm\": 0.46284224250325945,\n\ \ \"acc_norm_stderr\": 0.012734923579532063\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5698529411764706,\n \"acc_stderr\": 0.030074971917302875,\n\ \ \"acc_norm\": 0.5698529411764706,\n \"acc_norm_stderr\": 0.030074971917302875\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5882352941176471,\n \"acc_stderr\": 0.019910377463105935,\n \ \ \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.019910377463105935\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n\ \ \"acc_stderr\": 0.04582004841505417,\n \"acc_norm\": 0.6454545454545455,\n\ \ \"acc_norm_stderr\": 0.04582004841505417\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6530612244897959,\n \"acc_stderr\": 0.030472526026726492,\n\ \ \"acc_norm\": 0.6530612244897959,\n \"acc_norm_stderr\": 0.030472526026726492\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7213930348258707,\n\ \ \"acc_stderr\": 0.031700561834973086,\n \"acc_norm\": 0.7213930348258707,\n\ \ \"acc_norm_stderr\": 0.031700561834973086\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366255,\n \ \ \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366255\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n\ \ \"acc_stderr\": 0.03882310850890593,\n \"acc_norm\": 0.463855421686747,\n\ \ \"acc_norm_stderr\": 0.03882310850890593\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n\ \ \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38922888616891066,\n\ \ \"mc1_stderr\": 0.017068552680690328,\n \"mc2\": 
0.5536706803409501,\n\ \ \"mc2_stderr\": 0.01611557269809252\n }\n}\n```" repo_url: https://huggingface.co/IkariDev/Athena-tmp leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|arc:challenge|25_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hellaswag|10_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:50:42.106753.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:50:42.106753.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:50:42.106753.parquet' - 
'**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:50:42.106753.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T15:50:42.106753.parquet' - config_name: 
harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T15:50:42.106753.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T15_50_42.106753 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T15:50:42.106753.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T15:50:42.106753.parquet' - config_name: results data_files: - split: 2023_08_29T15_50_42.106753 path: - results_2023-08-29T15:50:42.106753.parquet - split: latest path: - results_2023-08-29T15:50:42.106753.parquet --- # Dataset Card for Evaluation run of IkariDev/Athena-tmp ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/IkariDev/Athena-tmp - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [IkariDev/Athena-tmp](https://huggingface.co/IkariDev/Athena-tmp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_IkariDev__Athena-tmp", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T15:50:42.106753](https://huggingface.co/datasets/open-llm-leaderboard/details_IkariDev__Athena-tmp/blob/main/results_2023-08-29T15%3A50%3A42.106753.json): ```python { "all": { "acc": 0.5888874553745688, "acc_stderr": 0.03407664559390293, "acc_norm": 0.5926858740874733, "acc_norm_stderr": 0.034057449595187576, "mc1": 0.38922888616891066, "mc1_stderr": 0.017068552680690328, "mc2": 0.5536706803409501, "mc2_stderr": 0.01611557269809252 }, "harness|arc:challenge|25": { "acc": 0.5674061433447098, "acc_stderr": 0.014478005694182531, "acc_norm": 0.5921501706484642, "acc_norm_stderr": 0.014361097288449696 }, "harness|hellaswag|10": { "acc": 0.6218880701055567, "acc_stderr": 0.004839247332606038, "acc_norm": 0.8212507468631747, "acc_norm_stderr": 0.003823591814133031 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5185185185185185, "acc_stderr": 0.043163785995113245, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6381578947368421, "acc_stderr": 0.03910525752849726, "acc_norm": 0.6381578947368421, "acc_norm_stderr": 0.03910525752849726 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6075471698113207, "acc_stderr": 0.030052580579557845, "acc_norm": 0.6075471698113207, "acc_norm_stderr": 0.030052580579557845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7083333333333334, "acc_stderr": 0.03800968060554859, "acc_norm": 0.7083333333333334, "acc_norm_stderr": 0.03800968060554859 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5433526011560693, "acc_stderr": 0.03798106566014498, "acc_norm": 0.5433526011560693, "acc_norm_stderr": 0.03798106566014498 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.502127659574468, "acc_stderr": 0.03268572658667492, "acc_norm": 0.502127659574468, "acc_norm_stderr": 0.03268572658667492 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.043727482902780064, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.043727482902780064 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 
0.04165774775728763, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.024594975128920938, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.024594975128920938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377562, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377562 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6451612903225806, "acc_stderr": 0.02721888977330876, "acc_norm": 0.6451612903225806, "acc_norm_stderr": 0.02721888977330876 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785742, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785742 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.033175059300091805, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.033175059300091805 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7727272727272727, "acc_stderr": 0.02985751567338642, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.02985751567338642 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.025787723180723875, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.025787723180723875 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6205128205128205, "acc_stderr": 0.024603626924097417, "acc_norm": 0.6205128205128205, "acc_norm_stderr": 0.024603626924097417 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616258, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616258 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6134453781512605, "acc_stderr": 0.03163145807552378, "acc_norm": 0.6134453781512605, "acc_norm_stderr": 0.03163145807552378 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7963302752293578, "acc_stderr": 0.017266742087630804, "acc_norm": 0.7963302752293578, "acc_norm_stderr": 0.017266742087630804 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896078, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896078 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.02732547096671632, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.02732547096671632 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.02655837250266192, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.02655837250266192 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6564885496183206, "acc_stderr": 0.041649760719448786, 
"acc_norm": 0.6564885496183206, "acc_norm_stderr": 0.041649760719448786 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.041032038305145124, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.041032038305145124 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7116564417177914, "acc_stderr": 0.035590395316173425, "acc_norm": 0.7116564417177914, "acc_norm_stderr": 0.035590395316173425 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.36607142857142855, "acc_stderr": 0.0457237235873743, "acc_norm": 0.36607142857142855, "acc_norm_stderr": 0.0457237235873743 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.02514093595033545, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.02514093595033545 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7790549169859514, "acc_stderr": 0.014836205167333567, "acc_norm": 0.7790549169859514, "acc_norm_stderr": 0.014836205167333567 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6445086705202312, "acc_stderr": 0.025770292082977247, "acc_norm": 0.6445086705202312, "acc_norm_stderr": 0.025770292082977247 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.44692737430167595, "acc_stderr": 0.01662803003964761, "acc_norm": 0.44692737430167595, "acc_norm_stderr": 0.01662803003964761 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.630718954248366, "acc_stderr": 0.027634176689602656, "acc_norm": 0.630718954248366, "acc_norm_stderr": 0.027634176689602656 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6913183279742765, "acc_stderr": 0.026236965881153262, "acc_norm": 0.6913183279742765, "acc_norm_stderr": 0.026236965881153262 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6944444444444444, "acc_stderr": 0.025630824975621344, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.025630824975621344 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.450354609929078, "acc_stderr": 0.02968010556502904, "acc_norm": 0.450354609929078, "acc_norm_stderr": 0.02968010556502904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46284224250325945, "acc_stderr": 0.012734923579532063, "acc_norm": 0.46284224250325945, "acc_norm_stderr": 0.012734923579532063 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5698529411764706, "acc_stderr": 0.030074971917302875, "acc_norm": 0.5698529411764706, "acc_norm_stderr": 0.030074971917302875 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5882352941176471, "acc_stderr": 0.019910377463105935, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.019910377463105935 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.04582004841505417, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.04582004841505417 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6530612244897959, "acc_stderr": 0.030472526026726492, "acc_norm": 0.6530612244897959, "acc_norm_stderr": 0.030472526026726492 }, "harness|hendrycksTest-sociology|5": { "acc": 
0.7213930348258707, "acc_stderr": 0.031700561834973086, "acc_norm": 0.7213930348258707, "acc_norm_stderr": 0.031700561834973086 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.039427724440366255, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366255 }, "harness|hendrycksTest-virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890593, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890593 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.030611116557432528, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.030611116557432528 }, "harness|truthfulqa:mc|0": { "mc1": 0.38922888616891066, "mc1_stderr": 0.017068552680690328, "mc2": 0.5536706803409501, "mc2_stderr": 0.01611557269809252 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
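A minimal sketch for pulling the aggregate numbers shown in the "Latest results" block directly from the `results` configuration declared in the YAML header; the `latest` split name comes from those configs, while the exact parquet column layout is an assumption, so inspect the columns before relying on specific keys.

```python
from datasets import load_dataset

# The "results" config and its "latest" split are declared in the YAML header above;
# "latest" points at the most recent evaluation run of IkariDev/Athena-tmp.
results = load_dataset(
    "open-llm-leaderboard/details_IkariDev__Athena-tmp",
    "results",
    split="latest",
)

# The parquet schema is an assumption here -- list the columns first,
# then look at the aggregated row(s).
print(results.column_names)
print(results[0])
```

Per-task details can be loaded the same way by swapping in any of the `harness_hendrycksTest_*` config names listed above.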
[ -0.7601831555366516, -0.8377779126167297, 0.3008557856082916, 0.19851082563400269, -0.2166142761707306, -0.085299551486969, 0.07370377331972122, -0.24335478246212006, 0.5994465947151184, -0.10194602608680725, -0.49308302998542786, -0.7626543641090393, -0.5077283382415771, 0.21742494404315948, -0.0005317431059665978, 0.7851396203041077, -0.1919945478439331, -0.1375376582145691, 0.10218778997659683, -0.06800384074449539, -0.25554946064949036, -0.31628116965293884, -0.5350136756896973, -0.33635276556015015, 0.15287870168685913, 0.44328033924102783, 0.45093590021133423, 0.8372992873191833, 0.6840007901191711, 0.29154056310653687, -0.3265283405780792, -0.06270044296979904, -0.16924850642681122, -0.27844107151031494, 0.36955025792121887, -0.3093956410884857, -0.8816298842430115, 0.31512659788131714, 0.798111617565155, 0.7156163454055786, -0.04882960021495819, 0.3571341633796692, 0.06065812706947327, 0.565251886844635, -0.32756760716438293, 0.06861579418182373, -0.24557512998580933, 0.2908504009246826, -0.22354693710803986, -0.32613450288772583, -0.2785671651363373, -0.30836766958236694, -0.07689718157052994, -0.9104167222976685, 0.22330203652381897, 0.3287156820297241, 1.5693905353546143, -0.15698760747909546, -0.2658606469631195, 0.10068610310554504, -0.11578335613012314, 1.0661338567733765, -0.8607094883918762, 0.3474763035774231, 0.8036351799964905, 0.15619756281375885, -0.1404176652431488, -0.5673608183860779, -0.643131673336029, 0.13589400053024292, -0.331267774105072, 0.3485091030597687, -0.043954357504844666, -0.2028287649154663, 0.37020638585090637, 0.6938139200210571, -0.6129674911499023, 0.17183028161525726, -0.6699672341346741, -0.13873493671417236, 1.0135866403579712, 0.4122028350830078, 0.07369304448366165, -0.34333252906799316, -0.6530784964561462, -0.6758794188499451, -0.38829731941223145, 0.2667046785354614, 0.44848453998565674, 0.3774143159389496, -0.4288250505924225, 0.7435319423675537, -0.4111593961715698, 0.5719630122184753, 0.44908347725868225, 0.0031015542335808277, 0.8984770774841309, -0.6167342066764832, -0.5544945001602173, -0.04097485914826393, 1.129258394241333, 0.6141433119773865, 0.04477812722325325, 0.21872733533382416, 0.10461776703596115, -0.08293179422616959, -0.027262309566140175, -0.8918583989143372, -0.2498859316110611, 0.1808568239212036, -0.42870181798934937, -0.5440906882286072, 0.34406086802482605, -0.9350182414054871, 0.14686745405197144, 0.004856751766055822, 0.4545004367828369, -0.47901731729507446, -0.16433554887771606, 0.3115611970424652, -0.3900679647922516, 0.8729005455970764, -0.22591890394687653, -0.8272978067398071, 0.4048681855201721, 0.5264984965324402, 0.7679320573806763, -0.0973021388053894, -0.4026864469051361, -0.05096586421132088, -0.06249180808663368, -0.28724151849746704, 0.558402955532074, -0.2987035810947418, -0.4917561709880829, -0.3036448359489441, 0.2720097303390503, -0.25884369015693665, -0.31569844484329224, 0.7656338810920715, -0.18725448846817017, 0.22116202116012573, -0.4772278368473053, -0.6578795909881592, 0.15479935705661774, 0.40772566199302673, -0.41532406210899353, 1.3149229288101196, 0.19270260632038116, -0.8866515159606934, 0.5181182026863098, -0.6149260401725769, -0.18436791002750397, -0.046713218092918396, -0.06287786364555359, -0.8680329918861389, -0.2975999414920807, 0.2119758427143097, 0.4232850670814514, -0.1648968607187271, -0.1602236032485962, -0.44500187039375305, -0.3336719572544098, 0.33639761805534363, -0.14203187823295593, 1.2280923128128052, -0.027361812070012093, -0.7654197812080383, 
-0.15319551527500153, -1.2734932899475098, 0.34584125876426697, 0.1991313397884369, -0.3980749845504761, -0.15850535035133362, -0.4838038384914398, -0.08104924112558365, 0.1530085802078247, 0.23000697791576385, -0.8456432223320007, 0.2787624001502991, -0.3777320981025696, 0.12128095328807831, 1.3036786317825317, 0.05533726513385773, 0.17091400921344757, -0.5817123055458069, 0.5143465995788574, 0.2416285127401352, 0.18571579456329346, 0.42710959911346436, -0.6246549487113953, -0.8278444409370422, -0.48712512850761414, -0.043106675148010254, 0.5906832218170166, -0.16228996217250824, 1.1612651348114014, 0.07794094830751419, -0.9225696921348572, -0.5017423033714294, -0.20150324702262878, 0.45012643933296204, 0.8741425275802612, 0.5777301788330078, -0.027062661945819855, -0.6443291306495667, -1.0644261837005615, -0.2984243333339691, -0.14693422615528107, 0.14053446054458618, 0.2741725444793701, 1.0097650289535522, -0.21215660870075226, 0.6365066170692444, -1.0476914644241333, -0.19743961095809937, 0.18312978744506836, -0.04839445650577545, 0.8116572499275208, 0.7936011552810669, 0.6356202960014343, -0.6160659193992615, -0.561837911605835, 0.16694724559783936, -0.9592785835266113, -0.0964697077870369, 0.17900894582271576, -0.3322767913341522, 0.06615258008241653, 0.11145542562007904, -0.7052347660064697, 0.5848957896232605, 0.2287018597126007, -1.1549068689346313, 1.0930233001708984, -0.3344407379627228, 0.6052425503730774, -1.105505108833313, 0.23490646481513977, -0.10163843631744385, 0.06010143831372261, -0.4692462980747223, 0.020613910630345345, 0.023871177807450294, 0.3866369426250458, -0.47519129514694214, 0.8217818140983582, -0.6888937950134277, -0.1161554828286171, 0.46644559502601624, 0.0957208052277565, -0.1489643156528473, 0.35128918290138245, -0.205344095826149, 0.7852767109870911, 0.8568524718284607, -0.5219465494155884, 0.5383244156837463, 0.41302791237831116, -0.2457675337791443, 0.8121890425682068, -0.5152164697647095, -0.3015775680541992, 0.2686097323894501, -0.10839827358722687, -0.8511201739311218, -0.488785982131958, 0.09291435778141022, -0.6209878325462341, -0.10384458303451538, 0.3547886610031128, -0.25865602493286133, -0.82034832239151, -0.9490206241607666, 0.37568700313568115, 0.7122315168380737, -0.4517371952533722, -0.14517509937286377, 0.04992852732539177, 0.0886886790394783, -0.8413538336753845, -0.8459686636924744, -0.49855080246925354, -0.24388733506202698, -0.7170066833496094, 0.2692440450191498, -0.2846423387527466, -0.266430526971817, -0.06653381884098053, -0.22970908880233765, -0.3084818422794342, 0.007014937698841095, 0.13378120958805084, 0.6392918229103088, -0.4488285183906555, -0.2857653498649597, -0.22400271892547607, -0.22964753210544586, 0.2301967442035675, -0.16262206435203552, 0.3955027759075165, -0.45011410117149353, -0.44742146134376526, -0.5217044353485107, 0.009026283398270607, 0.7474846243858337, -0.0416959673166275, 0.7974599003791809, 0.4090317189693451, -0.29669544100761414, 0.04892690107226372, -0.27055105566978455, -0.28426864743232727, -0.5884028077125549, 0.24152888357639313, -0.5541254281997681, -1.067453145980835, 0.8306341767311096, 0.5112317800521851, 0.08631398528814316, 1.1788138151168823, 0.6014330983161926, -0.32268795371055603, 1.0742765665054321, 0.06182313710451126, 0.32653287053108215, 0.3334123194217682, -0.6986207365989685, 0.09756829589605331, -0.9339464902877808, -0.3590579926967621, -0.614551305770874, -0.5215187668800354, -0.712496280670166, -0.034010861068964005, 0.28957924246788025, 0.11046193540096283, -0.7248786687850952, 
0.5137778520584106, -0.8548341393470764, 0.5840731859207153, 0.5473663806915283, 0.28315097093582153, 0.15487603843212128, -0.14232701063156128, -0.4783603847026825, -0.19905327260494232, -0.4308699369430542, -0.2509004473686218, 1.281906247138977, 0.29289790987968445, 0.7719056606292725, 0.11565055698156357, 0.8692438006401062, 0.06568587571382523, -0.10477892309427261, -0.5828511714935303, 0.6566387414932251, 0.17327801883220673, -0.8213274478912354, -0.3976971507072449, -0.5204500555992126, -1.0681055784225464, 0.41040924191474915, -0.21232661604881287, -0.8398516178131104, 0.15454675257205963, 0.01651286892592907, -0.21643781661987305, 0.5817590355873108, -0.5011796951293945, 0.8223711848258972, -0.08505550026893616, -0.4787486493587494, 0.1341230571269989, -0.7827286124229431, 0.426396906375885, 0.2332683652639389, 0.22823193669319153, 0.023091105744242668, 0.2695721387863159, 1.187395691871643, -0.8566216230392456, 0.46121856570243835, 0.05512470752000809, 0.06332671642303467, 0.31690096855163574, -0.14687508344650269, 0.48404091596603394, 0.10725893825292587, -0.03659781441092491, -0.15424253046512604, 0.2636500895023346, -0.9221863746643066, -0.0027063898742198944, 0.882258951663971, -1.0121688842773438, -0.5820673108100891, -0.9518574476242065, -0.4967748820781708, 0.059977490454912186, 0.6082440614700317, 0.4033335745334625, 0.5493125319480896, -0.010197610594332218, 0.4657292068004608, 0.9272738099098206, -0.1385466605424881, 0.5835275650024414, 0.22865869104862213, 0.13552746176719666, -0.6821566224098206, 0.8185731172561646, 0.03480425477027893, 0.36904194951057434, 0.22795195877552032, 0.42513710260391235, -0.5527519583702087, -0.23454326391220093, -0.21420304477214813, 0.5282804369926453, -0.6261167526245117, -0.26901692152023315, -0.4010904133319855, -0.35649195313453674, -0.8046582937240601, -0.6685036420822144, -0.31052425503730774, -0.5347899198532104, -0.5349050760269165, -0.5328289270401001, 0.6102845668792725, 0.440012663602829, -0.40680667757987976, 0.06339070200920105, -0.44987058639526367, 0.27027809619903564, 0.40328752994537354, 0.6343563795089722, -0.36396315693855286, -0.5755597352981567, 0.1283113956451416, -0.14762821793556213, -0.5318642258644104, -0.9364953637123108, 0.271172434091568, -0.03597112372517586, 0.49131205677986145, 0.6662914752960205, 0.040690191090106964, 0.8700403571128845, -0.1735771894454956, 1.022227168083191, 0.3468894958496094, -0.7589484453201294, 0.7383986711502075, -0.35757800936698914, 0.22696959972381592, 0.6886031627655029, 0.1651943475008011, -0.21992258727550507, -0.6896764039993286, -1.2847729921340942, -0.7744625210762024, 0.6912451386451721, 0.3760858476161957, -0.25288939476013184, 0.03685087710618973, 0.16814389824867249, -0.30380791425704956, -0.21161724627017975, -0.6909874081611633, -0.9064309000968933, -0.15148234367370605, -0.5363579392433167, 0.18238943815231323, -0.0104424599558115, -0.37955281138420105, -0.8393481969833374, 0.8989884853363037, 0.038790080696344376, 0.5908850431442261, 0.45377787947654724, 0.11336204409599304, 0.049674395471811295, 0.48118501901626587, 0.9420441389083862, 0.7404317259788513, -0.4795915484428406, 0.3986523449420929, 0.4257656931877136, -1.1139494180679321, 0.4732521176338196, 0.3190969228744507, -0.10629988461732864, -0.05583185702562332, 0.42979076504707336, 0.39129436016082764, 0.059154536575078964, -0.13569438457489014, 0.5922826528549194, -0.06384637951850891, -0.5746872425079346, -0.4200539290904999, 0.02902107685804367, -0.12347890436649323, -0.03522990643978119, 
0.4095115661621094, -0.14402128756046295, -0.05956222116947174, -0.5115840435028076, 0.4569820165634155, 0.3600923717021942, -0.49021661281585693, -0.16603395342826843, 0.7805240154266357, -0.199667289853096, -0.08767248690128326, 0.298223078250885, -0.15635159611701965, -0.5848493576049805, 1.1170016527175903, 0.6062906384468079, 0.685570478439331, -0.31821903586387634, -0.09896481782197952, 0.9828222393989563, 0.35914620757102966, -0.020802242681384087, 0.5769531726837158, 0.29769739508628845, -0.2567330002784729, 0.2092338353395462, -0.9241346120834351, -0.050801850855350494, 0.10093342512845993, -0.8186338543891907, 0.3124411106109619, -0.6129418611526489, -0.22618550062179565, 0.022275421768426895, 0.43954625725746155, -0.43731585144996643, 0.5446043610572815, -0.43019068241119385, 1.2439775466918945, -0.9757489562034607, 0.6815076470375061, 0.7611228823661804, -0.5311272740364075, -1.0491316318511963, -0.5724648237228394, -0.002112958813086152, -0.8033046722412109, 0.5579832792282104, -0.09489701688289642, 0.16945813596248627, -0.058611348271369934, -0.7226234078407288, -0.9183493852615356, 1.4087799787521362, -0.09489522874355316, -0.3477465510368347, 0.26643094420433044, 0.04733700677752495, 0.43963444232940674, 0.12138082832098007, 0.5924295783042908, 0.7521169185638428, 0.83817458152771, 0.012262260541319847, -0.7381083369255066, 0.3360648453235626, -0.488189697265625, -0.37082281708717346, 0.4760279953479767, -0.8977705240249634, 1.2086169719696045, -0.0036379981320351362, 0.18947935104370117, -0.1671786904335022, 0.6764795184135437, 0.8172686100006104, 0.28830164670944214, 0.3525340259075165, 1.005686640739441, 0.8447818756103516, -0.5214694738388062, 0.9788566827774048, -0.18701307475566864, 0.8596097230911255, 0.6694767475128174, 0.24501299858093262, 0.7554203867912292, 0.6529357433319092, -0.5281181335449219, 0.5298651456832886, 0.8164622187614441, -0.29472702741622925, 0.3968528211116791, 0.2114626169204712, -0.17156490683555603, -0.12048731744289398, 0.4507423937320709, -0.9149500727653503, 0.14323222637176514, 0.10260389000177383, -0.34159818291664124, 0.0532609298825264, -0.44529810547828674, 0.37568289041519165, -0.08379393070936203, -0.06533931940793991, 0.3892056345939636, 0.038809146732091904, -0.3862226903438568, 0.8994312882423401, -0.13568833470344543, 0.7899787425994873, -0.5207048654556274, -0.06181412935256958, -0.37332186102867126, 0.6349380016326904, -0.4291217029094696, -1.0940991640090942, 0.17843665182590485, 0.0715806856751442, -0.10867101699113846, -0.1444382667541504, 0.678766667842865, -0.19206953048706055, -0.8122355937957764, 0.07815428823232651, 0.006781791336834431, 0.09199045598506927, 0.5908669233322144, -0.7036086916923523, -0.33812183141708374, -0.07804761826992035, -0.5164275169372559, 0.0765567347407341, 0.32763057947158813, 0.33709678053855896, 0.5749460458755493, 0.654981791973114, 0.20598576962947845, 0.44407764077186584, -0.5805418491363525, 0.8038193583488464, -1.0807311534881592, -0.7669099569320679, -0.9255027770996094, 0.42149436473846436, -0.367511510848999, -0.8894578218460083, 1.0058033466339111, 1.0426349639892578, 0.8427242040634155, 0.024767229333519936, 0.6300243735313416, -0.38752126693725586, 0.34013834595680237, -0.36685991287231445, 1.0067130327224731, -0.8332861065864563, -0.26798921823501587, -0.2850280702114105, -0.7203932404518127, -0.4279061555862427, 0.8502088785171509, -0.20737536251544952, 0.06321696192026138, 1.0465431213378906, 0.6199610233306885, -0.16540776193141937, 0.0392155759036541, -0.08105488866567612, 
0.581173837184906, 0.38775476813316345, 1.0135589838027954, 0.6151214838027954, -0.8164876103401184, 0.3720802068710327, -0.4707716703414917, -0.439309298992157, -0.394420862197876, -0.47828713059425354, -0.9198413491249084, -0.48174187541007996, -0.19489169120788574, -0.6149064302444458, -0.16367186605930328, 0.9707455039024353, 0.4698953330516815, -0.9340866804122925, -0.42450079321861267, -0.06890522688627243, 0.1309301108121872, -0.5644215941429138, -0.4209561049938202, 0.7974553108215332, -0.07315226644277573, -0.5498220920562744, 0.11704396456480026, -0.13541732728481293, 0.30129608511924744, 0.12123633921146393, -0.4008939862251282, -0.7158187031745911, 0.023552842438220978, 0.4234660565853119, 0.42333561182022095, -0.6411699056625366, -0.7378273606300354, 0.28268492221832275, -0.5318422317504883, 0.43083634972572327, -0.05013059452176094, -0.5169526934623718, 0.10751132667064667, 0.7065709829330444, 0.4761924743652344, 0.7068914175033569, -0.030980592593550682, 0.06125187128782272, -0.6489965319633484, 0.27846139669418335, 0.009426676668226719, 0.26287561655044556, -0.04261293634772301, -0.3001921772956848, 0.7335218191146851, 0.6815931797027588, -0.5051862597465515, -1.0743757486343384, -0.4415090084075928, -1.445186734199524, 0.023691242560744286, 1.0445548295974731, 0.03495227172970772, -0.5196971893310547, 0.20823737978935242, -0.09977846592664719, 0.17213121056556702, -0.30951425433158875, 0.8151357769966125, 0.7748576998710632, -0.341807097196579, 0.10871126502752304, -0.6301780939102173, 0.3938848376274109, 0.5323007106781006, -1.2041397094726562, -0.12918506562709808, 0.18922024965286255, 0.3486047089099884, 0.31504276394844055, 0.6574241518974304, -0.17893639206886292, 0.27446991205215454, 0.2542809545993805, 0.030024517327547073, 0.0200436320155859, 0.10724028944969177, -0.20787695050239563, 0.08151298016309738, -0.2319703996181488, -0.49819108843803406 ]
open-llm-leaderboard/details_nathan0__mpt_delta_tuned_model_v2
open-llm-leaderboard
2023-08-29T16:17:36Z
201
0
[ "region:us" ]
null
2023-08-29T16:16:36Z
--- pretty_name: Evaluation run of nathan0/mpt_delta_tuned_model_v2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [nathan0/mpt_delta_tuned_model_v2](https://huggingface.co/nathan0/mpt_delta_tuned_model_v2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nathan0__mpt_delta_tuned_model_v2\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T16:16:19.015155](https://huggingface.co/datasets/open-llm-leaderboard/details_nathan0__mpt_delta_tuned_model_v2/blob/main/results_2023-08-29T16%3A16%3A19.015155.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.2950832179561353,\n \"\ acc_stderr\": 0.0329561063051657,\n \"acc_norm\": 0.29907052345099405,\n\ \ \"acc_norm_stderr\": 0.03294521260985216,\n \"mc1\": 0.23623011015911874,\n\ \ \"mc1_stderr\": 0.014869755015871108,\n \"mc2\": 0.35471976554662815,\n\ \ \"mc2_stderr\": 0.013741277408130734\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.45819112627986347,\n \"acc_stderr\": 0.014560220308714697,\n\ \ \"acc_norm\": 0.5068259385665529,\n \"acc_norm_stderr\": 0.014610029151379813\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5774746066520613,\n\ \ \"acc_stderr\": 0.004929517011508221,\n \"acc_norm\": 0.7640908185620394,\n\ \ \"acc_norm_stderr\": 0.0042369801453443065\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2740740740740741,\n\ \ \"acc_stderr\": 0.03853254836552003,\n \"acc_norm\": 0.2740740740740741,\n\ \ \"acc_norm_stderr\": 0.03853254836552003\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.2565789473684211,\n \"acc_stderr\": 0.0355418036802569,\n\ \ \"acc_norm\": 0.2565789473684211,\n \"acc_norm_stderr\": 0.0355418036802569\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.24,\n\ \ \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \ \ \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.3169811320754717,\n \"acc_stderr\": 0.028637235639800918,\n\ \ \"acc_norm\": 0.3169811320754717,\n \"acc_norm_stderr\": 0.028637235639800918\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3055555555555556,\n\ \ \"acc_stderr\": 0.03852084696008534,\n \"acc_norm\": 0.3055555555555556,\n\ \ \"acc_norm_stderr\": 0.03852084696008534\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 
0.04229525846816505\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n\ \ \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.21965317919075145,\n\ \ \"acc_stderr\": 0.03156809362703175,\n \"acc_norm\": 0.21965317919075145,\n\ \ \"acc_norm_stderr\": 0.03156809362703175\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n\ \ \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n\ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.31063829787234043,\n \"acc_stderr\": 0.03025123757921317,\n\ \ \"acc_norm\": 0.31063829787234043,\n \"acc_norm_stderr\": 0.03025123757921317\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n\ \ \"acc_stderr\": 0.04303684033537314,\n \"acc_norm\": 0.2982456140350877,\n\ \ \"acc_norm_stderr\": 0.04303684033537314\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.31724137931034485,\n \"acc_stderr\": 0.03878352372138622,\n\ \ \"acc_norm\": 0.31724137931034485,\n \"acc_norm_stderr\": 0.03878352372138622\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3148148148148148,\n \"acc_stderr\": 0.023919984164047736,\n \"\ acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.023919984164047736\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.19047619047619047,\n\ \ \"acc_stderr\": 0.03512207412302052,\n \"acc_norm\": 0.19047619047619047,\n\ \ \"acc_norm_stderr\": 0.03512207412302052\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.2903225806451613,\n \"acc_stderr\": 0.025822106119415898,\n \"\ acc_norm\": 0.2903225806451613,\n \"acc_norm_stderr\": 0.025822106119415898\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.26108374384236455,\n \"acc_stderr\": 0.03090379695211449,\n \"\ acc_norm\": 0.26108374384236455,\n \"acc_norm_stderr\": 0.03090379695211449\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\"\ : 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.03453131801885415,\n\ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.03453131801885415\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.30303030303030304,\n \"acc_stderr\": 0.03274287914026868,\n \"\ acc_norm\": 0.30303030303030304,\n \"acc_norm_stderr\": 0.03274287914026868\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.31088082901554404,\n \"acc_stderr\": 0.03340361906276585,\n\ \ \"acc_norm\": 0.31088082901554404,\n \"acc_norm_stderr\": 
0.03340361906276585\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.30512820512820515,\n \"acc_stderr\": 0.023346335293325887,\n\ \ \"acc_norm\": 0.30512820512820515,\n \"acc_norm_stderr\": 0.023346335293325887\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.27037037037037037,\n \"acc_stderr\": 0.027080372815145675,\n \ \ \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.027080372815145675\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.029344572500634346,\n\ \ \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.029344572500634346\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"\ acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.26972477064220185,\n \"acc_stderr\": 0.019028486711115445,\n \"\ acc_norm\": 0.26972477064220185,\n \"acc_norm_stderr\": 0.019028486711115445\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.18518518518518517,\n \"acc_stderr\": 0.02649191472735514,\n \"\ acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.02649191472735514\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604246,\n \"\ acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604246\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.2742616033755274,\n \"acc_stderr\": 0.029041333510598035,\n \ \ \"acc_norm\": 0.2742616033755274,\n \"acc_norm_stderr\": 0.029041333510598035\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3632286995515695,\n\ \ \"acc_stderr\": 0.032277904428505,\n \"acc_norm\": 0.3632286995515695,\n\ \ \"acc_norm_stderr\": 0.032277904428505\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2824427480916031,\n \"acc_stderr\": 0.03948406125768361,\n\ \ \"acc_norm\": 0.2824427480916031,\n \"acc_norm_stderr\": 0.03948406125768361\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.3884297520661157,\n \"acc_stderr\": 0.04449270350068382,\n \"\ acc_norm\": 0.3884297520661157,\n \"acc_norm_stderr\": 0.04449270350068382\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.37037037037037035,\n\ \ \"acc_stderr\": 0.04668408033024932,\n \"acc_norm\": 0.37037037037037035,\n\ \ \"acc_norm_stderr\": 0.04668408033024932\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.2822085889570552,\n \"acc_stderr\": 0.03536117886664743,\n\ \ \"acc_norm\": 0.2822085889570552,\n \"acc_norm_stderr\": 0.03536117886664743\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.36607142857142855,\n\ \ \"acc_stderr\": 0.045723723587374296,\n \"acc_norm\": 0.36607142857142855,\n\ \ \"acc_norm_stderr\": 0.045723723587374296\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.2815533980582524,\n \"acc_stderr\": 0.044532548363264673,\n\ \ \"acc_norm\": 0.2815533980582524,\n \"acc_norm_stderr\": 0.044532548363264673\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.3247863247863248,\n\ \ \"acc_stderr\": 0.03067902276549883,\n \"acc_norm\": 0.3247863247863248,\n\ \ \"acc_norm_stderr\": 0.03067902276549883\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.26,\n 
\"acc_stderr\": 0.04408440022768077,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768077\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.29757343550446996,\n\ \ \"acc_stderr\": 0.01634911191290943,\n \"acc_norm\": 0.29757343550446996,\n\ \ \"acc_norm_stderr\": 0.01634911191290943\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.2774566473988439,\n \"acc_stderr\": 0.024105712607754307,\n\ \ \"acc_norm\": 0.2774566473988439,\n \"acc_norm_stderr\": 0.024105712607754307\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.2679738562091503,\n \"acc_stderr\": 0.02536060379624256,\n\ \ \"acc_norm\": 0.2679738562091503,\n \"acc_norm_stderr\": 0.02536060379624256\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2861736334405145,\n\ \ \"acc_stderr\": 0.025670259242188936,\n \"acc_norm\": 0.2861736334405145,\n\ \ \"acc_norm_stderr\": 0.025670259242188936\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.2839506172839506,\n \"acc_stderr\": 0.02508947852376513,\n\ \ \"acc_norm\": 0.2839506172839506,\n \"acc_norm_stderr\": 0.02508947852376513\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2765957446808511,\n \"acc_stderr\": 0.026684564340461,\n \ \ \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.026684564340461\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25684485006518903,\n\ \ \"acc_stderr\": 0.011158455853098857,\n \"acc_norm\": 0.25684485006518903,\n\ \ \"acc_norm_stderr\": 0.011158455853098857\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.20220588235294118,\n \"acc_stderr\": 0.02439819298665492,\n\ \ \"acc_norm\": 0.20220588235294118,\n \"acc_norm_stderr\": 0.02439819298665492\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.31209150326797386,\n \"acc_stderr\": 0.018745011201277657,\n \ \ \"acc_norm\": 0.31209150326797386,\n \"acc_norm_stderr\": 0.018745011201277657\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.36363636363636365,\n\ \ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.36363636363636365,\n\ \ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.31020408163265306,\n \"acc_stderr\": 0.029613459872484378,\n\ \ \"acc_norm\": 0.31020408163265306,\n \"acc_norm_stderr\": 0.029613459872484378\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23383084577114427,\n\ \ \"acc_stderr\": 0.029929415408348398,\n \"acc_norm\": 0.23383084577114427,\n\ \ \"acc_norm_stderr\": 0.029929415408348398\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3493975903614458,\n\ \ \"acc_stderr\": 0.0371172519074075,\n \"acc_norm\": 0.3493975903614458,\n\ \ \"acc_norm_stderr\": 0.0371172519074075\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.26900584795321636,\n \"acc_stderr\": 0.03401052620104089,\n\ \ \"acc_norm\": 0.26900584795321636,\n \"acc_norm_stderr\": 0.03401052620104089\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23623011015911874,\n\ \ 
\"mc1_stderr\": 0.014869755015871108,\n \"mc2\": 0.35471976554662815,\n\ \ \"mc2_stderr\": 0.013741277408130734\n }\n}\n```" repo_url: https://huggingface.co/nathan0/mpt_delta_tuned_model_v2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|arc:challenge|25_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hellaswag|10_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T16:16:19.015155.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T16:16:19.015155.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T16:16:19.015155.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T16:16:19.015155.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T16:16:19.015155.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T16:16:19.015155.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T16_16_19.015155 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T16:16:19.015155.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T16:16:19.015155.parquet' - config_name: results data_files: - split: 2023_08_29T16_16_19.015155 path: - results_2023-08-29T16:16:19.015155.parquet - split: latest path: - results_2023-08-29T16:16:19.015155.parquet
---

# Dataset Card for Evaluation run of nathan0/mpt_delta_tuned_model_v2

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/nathan0/mpt_delta_tuned_model_v2
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [nathan0/mpt_delta_tuned_model_v2](https://huggingface.co/nathan0/mpt_delta_tuned_model_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
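As an illustration, these aggregated metrics can be read directly from the "results" configuration. The snippet below is a minimal sketch: it assumes the `datasets` library is installed and only relies on the configuration and split names declared in the YAML header above.

```python
from datasets import load_dataset

# Minimal sketch: load the aggregated metrics of this run from the "results"
# configuration; the "latest" split always points to the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_nathan0__mpt_delta_tuned_model_v2",
    "results",
    split="latest",
)

# Each row holds the aggregated metrics of one evaluation run.
print(results[0])
```

The same pattern works for any of the per-task configurations listed in the YAML header, since each of them also exposes a "latest" split.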
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nathan0__mpt_delta_tuned_model_v2", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T16:16:19.015155](https://huggingface.co/datasets/open-llm-leaderboard/details_nathan0__mpt_delta_tuned_model_v2/blob/main/results_2023-08-29T16%3A16%3A19.015155.json): ```python { "all": { "acc": 0.2950832179561353, "acc_stderr": 0.0329561063051657, "acc_norm": 0.29907052345099405, "acc_norm_stderr": 0.03294521260985216, "mc1": 0.23623011015911874, "mc1_stderr": 0.014869755015871108, "mc2": 0.35471976554662815, "mc2_stderr": 0.013741277408130734 }, "harness|arc:challenge|25": { "acc": 0.45819112627986347, "acc_stderr": 0.014560220308714697, "acc_norm": 0.5068259385665529, "acc_norm_stderr": 0.014610029151379813 }, "harness|hellaswag|10": { "acc": 0.5774746066520613, "acc_stderr": 0.004929517011508221, "acc_norm": 0.7640908185620394, "acc_norm_stderr": 0.0042369801453443065 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2565789473684211, "acc_stderr": 0.0355418036802569, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.0355418036802569 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3169811320754717, "acc_stderr": 0.028637235639800918, "acc_norm": 0.3169811320754717, "acc_norm_stderr": 0.028637235639800918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03852084696008534, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03852084696008534 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.21965317919075145, "acc_stderr": 0.03156809362703175, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.03156809362703175 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.31063829787234043, "acc_stderr": 0.03025123757921317, "acc_norm": 0.31063829787234043, "acc_norm_stderr": 0.03025123757921317 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537314, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537314 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 
0.31724137931034485, "acc_stderr": 0.03878352372138622, "acc_norm": 0.31724137931034485, "acc_norm_stderr": 0.03878352372138622 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.023919984164047736, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.023919984164047736 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.19047619047619047, "acc_stderr": 0.03512207412302052, "acc_norm": 0.19047619047619047, "acc_norm_stderr": 0.03512207412302052 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2903225806451613, "acc_stderr": 0.025822106119415898, "acc_norm": 0.2903225806451613, "acc_norm_stderr": 0.025822106119415898 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.03090379695211449, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.03090379695211449 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885415, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885415 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.30303030303030304, "acc_stderr": 0.03274287914026868, "acc_norm": 0.30303030303030304, "acc_norm_stderr": 0.03274287914026868 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.31088082901554404, "acc_stderr": 0.03340361906276585, "acc_norm": 0.31088082901554404, "acc_norm_stderr": 0.03340361906276585 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.30512820512820515, "acc_stderr": 0.023346335293325887, "acc_norm": 0.30512820512820515, "acc_norm_stderr": 0.023346335293325887 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.027080372815145675, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.027080372815145675 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2857142857142857, "acc_stderr": 0.029344572500634346, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.029344572500634346 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.26972477064220185, "acc_stderr": 0.019028486711115445, "acc_norm": 0.26972477064220185, "acc_norm_stderr": 0.019028486711115445 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.18518518518518517, "acc_stderr": 0.02649191472735514, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.02649191472735514 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598035, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598035 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3632286995515695, "acc_stderr": 0.032277904428505, "acc_norm": 0.3632286995515695, "acc_norm_stderr": 0.032277904428505 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 
0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.3884297520661157, "acc_norm_stderr": 0.04449270350068382 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.37037037037037035, "acc_stderr": 0.04668408033024932, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04668408033024932 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2822085889570552, "acc_stderr": 0.03536117886664743, "acc_norm": 0.2822085889570552, "acc_norm_stderr": 0.03536117886664743 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.36607142857142855, "acc_stderr": 0.045723723587374296, "acc_norm": 0.36607142857142855, "acc_norm_stderr": 0.045723723587374296 }, "harness|hendrycksTest-management|5": { "acc": 0.2815533980582524, "acc_stderr": 0.044532548363264673, "acc_norm": 0.2815533980582524, "acc_norm_stderr": 0.044532548363264673 }, "harness|hendrycksTest-marketing|5": { "acc": 0.3247863247863248, "acc_stderr": 0.03067902276549883, "acc_norm": 0.3247863247863248, "acc_norm_stderr": 0.03067902276549883 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.29757343550446996, "acc_stderr": 0.01634911191290943, "acc_norm": 0.29757343550446996, "acc_norm_stderr": 0.01634911191290943 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2774566473988439, "acc_stderr": 0.024105712607754307, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2679738562091503, "acc_stderr": 0.02536060379624256, "acc_norm": 0.2679738562091503, "acc_norm_stderr": 0.02536060379624256 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2861736334405145, "acc_stderr": 0.025670259242188936, "acc_norm": 0.2861736334405145, "acc_norm_stderr": 0.025670259242188936 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2839506172839506, "acc_stderr": 0.02508947852376513, "acc_norm": 0.2839506172839506, "acc_norm_stderr": 0.02508947852376513 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340461, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340461 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.25684485006518903, "acc_stderr": 0.011158455853098857, "acc_norm": 0.25684485006518903, "acc_norm_stderr": 0.011158455853098857 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.20220588235294118, "acc_stderr": 0.02439819298665492, "acc_norm": 0.20220588235294118, "acc_norm_stderr": 0.02439819298665492 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.31209150326797386, "acc_stderr": 0.018745011201277657, "acc_norm": 0.31209150326797386, "acc_norm_stderr": 0.018745011201277657 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.36363636363636365, "acc_stderr": 0.04607582090719976, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.31020408163265306, "acc_stderr": 0.029613459872484378, "acc_norm": 0.31020408163265306, "acc_norm_stderr": 
0.029613459872484378 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.029929415408348398, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.029929415408348398 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.3493975903614458, "acc_stderr": 0.0371172519074075, "acc_norm": 0.3493975903614458, "acc_norm_stderr": 0.0371172519074075 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.26900584795321636, "acc_stderr": 0.03401052620104089, "acc_norm": 0.26900584795321636, "acc_norm_stderr": 0.03401052620104089 }, "harness|truthfulqa:mc|0": { "mc1": 0.23623011015911874, "mc1_stderr": 0.014869755015871108, "mc2": 0.35471976554662815, "mc2_stderr": 0.013741277408130734 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_nicholasKluge__Aira-124M
open-llm-leaderboard
2023-08-29T19:05:51Z
201
0
[ "region:us" ]
null
2023-08-29T19:04:50Z
--- pretty_name: Evaluation run of nicholasKluge/Aira-124M dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [nicholasKluge/Aira-124M](https://huggingface.co/nicholasKluge/Aira-124M) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nicholasKluge__Aira-124M\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T19:04:35.532451](https://huggingface.co/datasets/open-llm-leaderboard/details_nicholasKluge__Aira-124M/blob/main/results_2023-08-29T19%3A04%3A35.532451.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25265346552614076,\n\ \ \"acc_stderr\": 0.03117857003137413,\n \"acc_norm\": 0.253799928797708,\n\ \ \"acc_norm_stderr\": 0.03119563902907945,\n \"mc1\": 0.2460220318237454,\n\ \ \"mc1_stderr\": 0.01507721920066259,\n \"mc2\": 0.41020465472810524,\n\ \ \"mc2_stderr\": 0.015012374839842264\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.19880546075085323,\n \"acc_stderr\": 0.01166285019817554,\n\ \ \"acc_norm\": 0.24573378839590443,\n \"acc_norm_stderr\": 0.012581033453730107\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2921728739294961,\n\ \ \"acc_stderr\": 0.004538319464111971,\n \"acc_norm\": 0.312885879306911,\n\ \ \"acc_norm_stderr\": 0.004627207073171273\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2074074074074074,\n\ \ \"acc_stderr\": 0.03502553170678316,\n \"acc_norm\": 0.2074074074074074,\n\ \ \"acc_norm_stderr\": 0.03502553170678316\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\ \ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.2,\n\ \ \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \ \ \"acc_norm_stderr\": 0.040201512610368445\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.21132075471698114,\n \"acc_stderr\": 0.025125766484827845,\n\ \ \"acc_norm\": 0.21132075471698114,\n \"acc_norm_stderr\": 0.025125766484827845\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\ \ \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n\ \ \"acc_norm_stderr\": 0.03653946969442099\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \ \ \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\"\ : 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.19,\n \"acc_stderr\": 0.03942772444036623,\n \ \ \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.03942772444036623\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n\ \ \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n\ \ \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n\ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n\ \ \"acc_stderr\": 0.04096985139843671,\n \"acc_norm\": 0.2543859649122807,\n\ \ \"acc_norm_stderr\": 0.04096985139843671\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2620689655172414,\n \"acc_stderr\": 0.036646663372252565,\n\ \ \"acc_norm\": 0.2620689655172414,\n \"acc_norm_stderr\": 0.036646663372252565\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.24867724867724866,\n \"acc_stderr\": 0.022261817692400168,\n \"\ acc_norm\": 0.24867724867724866,\n \"acc_norm_stderr\": 0.022261817692400168\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.16666666666666666,\n\ \ \"acc_stderr\": 0.03333333333333337,\n \"acc_norm\": 0.16666666666666666,\n\ \ \"acc_norm_stderr\": 0.03333333333333337\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.13,\n \"acc_stderr\": 0.03379976689896309,\n \ \ \"acc_norm\": 0.13,\n \"acc_norm_stderr\": 0.03379976689896309\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.21935483870967742,\n\ \ \"acc_stderr\": 0.023540799358723285,\n \"acc_norm\": 0.21935483870967742,\n\ \ \"acc_norm_stderr\": 0.023540799358723285\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.19704433497536947,\n \"acc_stderr\": 0.02798672466673622,\n\ \ \"acc_norm\": 0.19704433497536947,\n \"acc_norm_stderr\": 0.02798672466673622\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\"\ : 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.21212121212121213,\n \"acc_stderr\": 0.03192271569548299,\n\ \ \"acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 0.03192271569548299\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.35353535353535354,\n \"acc_stderr\": 0.03406086723547153,\n \"\ acc_norm\": 0.35353535353535354,\n \"acc_norm_stderr\": 0.03406086723547153\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.03480175668466036,\n\ \ \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.30256410256410254,\n \"acc_stderr\": 0.023290888053772725,\n\ \ \"acc_norm\": 0.30256410256410254,\n \"acc_norm_stderr\": 0.023290888053772725\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.22962962962962963,\n \"acc_stderr\": 0.02564410863926763,\n \ \ \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.02564410863926763\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.23109243697478993,\n \"acc_stderr\": 0.027381406927868966,\n\ \ \"acc_norm\": 0.23109243697478993,\n \"acc_norm_stderr\": 0.027381406927868966\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2185430463576159,\n \"acc_stderr\": 0.03374235550425694,\n \"\ acc_norm\": 0.2185430463576159,\n \"acc_norm_stderr\": 0.03374235550425694\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.3467889908256881,\n \"acc_stderr\": 0.020406097104093027,\n \"\ acc_norm\": 0.3467889908256881,\n \"acc_norm_stderr\": 0.020406097104093027\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\"\ : 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n\ \ \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.27941176470588236,\n\ \ \"acc_stderr\": 0.031493281045079556,\n \"acc_norm\": 0.27941176470588236,\n\ \ \"acc_norm_stderr\": 0.031493281045079556\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.28270042194092826,\n \"acc_stderr\": 0.02931281415395594,\n\ \ \"acc_norm\": 0.28270042194092826,\n \"acc_norm_stderr\": 0.02931281415395594\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.1210762331838565,\n\ \ \"acc_stderr\": 0.021894174113185737,\n \"acc_norm\": 0.1210762331838565,\n\ \ \"acc_norm_stderr\": 0.021894174113185737\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306086,\n\ \ \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306086\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2727272727272727,\n \"acc_stderr\": 0.04065578140908705,\n \"\ acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.04065578140908705\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n\ \ \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \ \ \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.26993865030674846,\n \"acc_stderr\": 0.03487825168497892,\n\ \ \"acc_norm\": 0.26993865030674846,\n \"acc_norm_stderr\": 0.03487825168497892\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n\ \ \"acc_stderr\": 0.04246624336697624,\n \"acc_norm\": 0.2767857142857143,\n\ \ \"acc_norm_stderr\": 0.04246624336697624\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.1941747572815534,\n \"acc_stderr\": 0.03916667762822585,\n\ \ \"acc_norm\": 0.1941747572815534,\n \"acc_norm_stderr\": 0.03916667762822585\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23504273504273504,\n\ \ \"acc_stderr\": 
0.027778835904935427,\n \"acc_norm\": 0.23504273504273504,\n\ \ \"acc_norm_stderr\": 0.027778835904935427\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.24521072796934865,\n\ \ \"acc_stderr\": 0.01538435228454394,\n \"acc_norm\": 0.24521072796934865,\n\ \ \"acc_norm_stderr\": 0.01538435228454394\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n\ \ \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.25163398692810457,\n \"acc_stderr\": 0.024848018263875195,\n\ \ \"acc_norm\": 0.25163398692810457,\n \"acc_norm_stderr\": 0.024848018263875195\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.18006430868167203,\n\ \ \"acc_stderr\": 0.021823422857744953,\n \"acc_norm\": 0.18006430868167203,\n\ \ \"acc_norm_stderr\": 0.021823422857744953\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.25308641975308643,\n \"acc_stderr\": 0.02419180860071301,\n\ \ \"acc_norm\": 0.25308641975308643,\n \"acc_norm_stderr\": 0.02419180860071301\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2730496453900709,\n \"acc_stderr\": 0.026577860943307857,\n \ \ \"acc_norm\": 0.2730496453900709,\n \"acc_norm_stderr\": 0.026577860943307857\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25554106910039115,\n\ \ \"acc_stderr\": 0.011139857833598506,\n \"acc_norm\": 0.25554106910039115,\n\ \ \"acc_norm_stderr\": 0.011139857833598506\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.44485294117647056,\n \"acc_stderr\": 0.030187532060329376,\n\ \ \"acc_norm\": 0.44485294117647056,\n \"acc_norm_stderr\": 0.030187532060329376\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\ : {\n \"acc\": 0.18181818181818182,\n \"acc_stderr\": 0.036942843353378,\n\ \ \"acc_norm\": 0.18181818181818182,\n \"acc_norm_stderr\": 0.036942843353378\n\ \ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.4,\n\ \ \"acc_stderr\": 0.031362502409358936,\n \"acc_norm\": 0.4,\n \ \ \"acc_norm_stderr\": 0.031362502409358936\n },\n \"harness|hendrycksTest-sociology|5\"\ : {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.030360490154014652,\n\ \ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.030360490154014652\n\ \ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\ \ 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n\ \ \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.22289156626506024,\n \"acc_stderr\": 0.03240004825594689,\n\ \ \"acc_norm\": 0.22289156626506024,\n \"acc_norm_stderr\": 0.03240004825594689\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2807017543859649,\n\ \ \"acc_stderr\": 0.034462962170884265,\n \"acc_norm\": 0.2807017543859649,\n\ 
\ \"acc_norm_stderr\": 0.034462962170884265\n },\n \"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.2460220318237454,\n \"mc1_stderr\": 0.01507721920066259,\n\ \ \"mc2\": 0.41020465472810524,\n \"mc2_stderr\": 0.015012374839842264\n\ \ }\n}\n```" repo_url: https://huggingface.co/nicholasKluge/Aira-124M leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|arc:challenge|25_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hellaswag|10_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:04:35.532451.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:04:35.532451.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:04:35.532451.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:04:35.532451.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:04:35.532451.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:04:35.532451.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T19_04_35.532451 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T19:04:35.532451.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T19:04:35.532451.parquet' - config_name: results data_files: - split: 2023_08_29T19_04_35.532451 path: - results_2023-08-29T19:04:35.532451.parquet - split: latest path: - results_2023-08-29T19:04:35.532451.parquet --- # Dataset Card for Evaluation run of nicholasKluge/Aira-124M ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/nicholasKluge/Aira-124M - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [nicholasKluge/Aira-124M](https://huggingface.co/nicholasKluge/Aira-124M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
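For instance, a minimal sketch of pulling those aggregated metrics from the "results" configuration (the "latest" split listed in the configs above tracks the most recent run; the exact row layout of the aggregated file is an assumption here, so the final print is only illustrative):

```python
from datasets import load_dataset

# The aggregated metrics for the run live in the "results" configuration;
# the "latest" split points at the most recent evaluation timestamp.
results = load_dataset(
    "open-llm-leaderboard/details_nicholasKluge__Aira-124M",
    "results",
    split="latest",
)

# Inspect the first record (assumed: one record of aggregated metrics per run).
print(results[0])
```

The per-task details are loaded the same way, one configuration per task, as shown below.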
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nicholasKluge__Aira-124M", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T19:04:35.532451](https://huggingface.co/datasets/open-llm-leaderboard/details_nicholasKluge__Aira-124M/blob/main/results_2023-08-29T19%3A04%3A35.532451.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25265346552614076, "acc_stderr": 0.03117857003137413, "acc_norm": 0.253799928797708, "acc_norm_stderr": 0.03119563902907945, "mc1": 0.2460220318237454, "mc1_stderr": 0.01507721920066259, "mc2": 0.41020465472810524, "mc2_stderr": 0.015012374839842264 }, "harness|arc:challenge|25": { "acc": 0.19880546075085323, "acc_stderr": 0.01166285019817554, "acc_norm": 0.24573378839590443, "acc_norm_stderr": 0.012581033453730107 }, "harness|hellaswag|10": { "acc": 0.2921728739294961, "acc_stderr": 0.004538319464111971, "acc_norm": 0.312885879306911, "acc_norm_stderr": 0.004627207073171273 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2074074074074074, "acc_stderr": 0.03502553170678316, "acc_norm": 0.2074074074074074, "acc_norm_stderr": 0.03502553170678316 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21132075471698114, "acc_stderr": 0.025125766484827845, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.025125766484827845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.19, "acc_stderr": 0.039427724440366234, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 
0.04096985139843671, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.04096985139843671 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2620689655172414, "acc_stderr": 0.036646663372252565, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.036646663372252565 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24867724867724866, "acc_stderr": 0.022261817692400168, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.022261817692400168 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333337, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333337 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.13, "acc_stderr": 0.03379976689896309, "acc_norm": 0.13, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.21935483870967742, "acc_stderr": 0.023540799358723285, "acc_norm": 0.21935483870967742, "acc_norm_stderr": 0.023540799358723285 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.19704433497536947, "acc_stderr": 0.02798672466673622, "acc_norm": 0.19704433497536947, "acc_norm_stderr": 0.02798672466673622 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21212121212121213, "acc_stderr": 0.03192271569548299, "acc_norm": 0.21212121212121213, "acc_norm_stderr": 0.03192271569548299 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.35353535353535354, "acc_stderr": 0.03406086723547153, "acc_norm": 0.35353535353535354, "acc_norm_stderr": 0.03406086723547153 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.30256410256410254, "acc_stderr": 0.023290888053772725, "acc_norm": 0.30256410256410254, "acc_norm_stderr": 0.023290888053772725 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.22962962962962963, "acc_stderr": 0.02564410863926763, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.02564410863926763 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23109243697478993, "acc_stderr": 0.027381406927868966, "acc_norm": 0.23109243697478993, "acc_norm_stderr": 0.027381406927868966 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2185430463576159, "acc_stderr": 0.03374235550425694, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.03374235550425694 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3467889908256881, "acc_stderr": 0.020406097104093027, "acc_norm": 0.3467889908256881, "acc_norm_stderr": 0.020406097104093027 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27941176470588236, "acc_stderr": 0.031493281045079556, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.031493281045079556 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.28270042194092826, "acc_stderr": 0.02931281415395594, "acc_norm": 0.28270042194092826, "acc_norm_stderr": 0.02931281415395594 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.1210762331838565, "acc_stderr": 
0.021894174113185737, "acc_norm": 0.1210762331838565, "acc_norm_stderr": 0.021894174113185737 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306086, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2727272727272727, "acc_stderr": 0.04065578140908705, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.04065578140908705 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26993865030674846, "acc_stderr": 0.03487825168497892, "acc_norm": 0.26993865030674846, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697624, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697624 }, "harness|hendrycksTest-management|5": { "acc": 0.1941747572815534, "acc_stderr": 0.03916667762822585, "acc_norm": 0.1941747572815534, "acc_norm_stderr": 0.03916667762822585 }, "harness|hendrycksTest-marketing|5": { "acc": 0.23504273504273504, "acc_stderr": 0.027778835904935427, "acc_norm": 0.23504273504273504, "acc_norm_stderr": 0.027778835904935427 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.24521072796934865, "acc_stderr": 0.01538435228454394, "acc_norm": 0.24521072796934865, "acc_norm_stderr": 0.01538435228454394 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.25163398692810457, "acc_stderr": 0.024848018263875195, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.024848018263875195 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.18006430868167203, "acc_stderr": 0.021823422857744953, "acc_norm": 0.18006430868167203, "acc_norm_stderr": 0.021823422857744953 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25308641975308643, "acc_stderr": 0.02419180860071301, "acc_norm": 0.25308641975308643, "acc_norm_stderr": 0.02419180860071301 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2730496453900709, "acc_stderr": 0.026577860943307857, "acc_norm": 0.2730496453900709, "acc_norm_stderr": 0.026577860943307857 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.25554106910039115, "acc_stderr": 0.011139857833598506, "acc_norm": 0.25554106910039115, "acc_norm_stderr": 0.011139857833598506 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.44485294117647056, "acc_stderr": 0.030187532060329376, "acc_norm": 0.44485294117647056, "acc_norm_stderr": 0.030187532060329376 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.18181818181818182, "acc_stderr": 0.036942843353378, "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.036942843353378 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4, 
"acc_stderr": 0.031362502409358936, "acc_norm": 0.4, "acc_norm_stderr": 0.031362502409358936 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014652, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014652 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-virology|5": { "acc": 0.22289156626506024, "acc_stderr": 0.03240004825594689, "acc_norm": 0.22289156626506024, "acc_norm_stderr": 0.03240004825594689 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2807017543859649, "acc_stderr": 0.034462962170884265, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.034462962170884265 }, "harness|truthfulqa:mc|0": { "mc1": 0.2460220318237454, "mc1_stderr": 0.01507721920066259, "mc2": 0.41020465472810524, "mc2_stderr": 0.015012374839842264 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7350066304206848, -0.8430584669113159, 0.27669933438301086, 0.19303162395954132, -0.12088972330093384, -0.06568652391433716, 0.03953425958752632, -0.20621563494205475, 0.6059940457344055, -0.09957434237003326, -0.5009605288505554, -0.6695300936698914, -0.4526948928833008, 0.22486554086208344, -0.04683493822813034, 0.7830190062522888, -0.1919480860233307, -0.12858383357524872, 0.08863445371389389, -0.019919341430068016, -0.24072393774986267, -0.3635566532611847, -0.47016647458076477, -0.362914115190506, 0.17360669374465942, 0.45343053340911865, 0.4494238495826721, 0.8216722011566162, 0.6680325269699097, 0.2887069582939148, -0.32703718543052673, -0.010600384324789047, -0.1733894646167755, -0.3179488480091095, 0.40108904242515564, -0.3226105868816376, -0.8228597044944763, 0.32752886414527893, 0.7681142091751099, 0.659163773059845, -0.10728316009044647, 0.2696051597595215, 0.010604307986795902, 0.5865586400032043, -0.37432605028152466, 0.004344000946730375, -0.29238322377204895, 0.20976783335208893, -0.17196838557720184, -0.2600122094154358, -0.2858063876628876, -0.1924845427274704, -0.1542978733778, -0.9103758335113525, 0.24599377810955048, 0.3133903443813324, 1.6257880926132202, -0.15526965260505676, -0.24883249402046204, 0.09504358470439911, -0.14188693463802338, 1.0109541416168213, -0.8513825535774231, 0.36888015270233154, 0.7776483297348022, 0.14058378338813782, -0.17043083906173706, -0.5696274638175964, -0.6300775408744812, 0.09747987240552902, -0.35593122243881226, 0.35613977909088135, -0.05973781645298004, -0.19182278215885162, 0.3663072884082794, 0.6522759199142456, -0.6642589569091797, 0.14552094042301178, -0.6458098888397217, -0.15867410600185394, 1.0521513223648071, 0.3424113988876343, 0.0630292296409607, -0.3510877192020416, -0.687069833278656, -0.6620188355445862, -0.398833692073822, 0.2518562376499176, 0.4468928575515747, 0.35350826382637024, -0.36962974071502686, 0.6955529451370239, -0.4440233111381531, 0.56032395362854, 0.40915000438690186, 0.06242286041378975, 0.8898212909698486, -0.6588490605354309, -0.542311429977417, -0.06321647018194199, 1.0685434341430664, 0.53920578956604, 0.0693766251206398, 0.22019530832767487, 0.0349392332136631, -0.09388706833124161, 0.053505100309848785, -0.887220561504364, -0.3026998043060303, 0.1472378820180893, -0.3960634469985962, -0.5020156502723694, 0.37549570202827454, -0.8922781944274902, 0.1604195535182953, -0.050225112587213516, 0.42864152789115906, -0.4628998935222626, -0.12278379499912262, 0.24949945509433746, -0.4144378900527954, 0.832112729549408, -0.18376410007476807, -0.8109031915664673, 0.4081825911998749, 0.5236644744873047, 0.7569028735160828, -0.07809571176767349, -0.44718390703201294, -0.08715978264808655, -0.11432935297489166, -0.2964654266834259, 0.5433446168899536, -0.30324485898017883, -0.4087323546409607, -0.30509576201438904, 0.29028141498565674, -0.2878148853778839, -0.339554101228714, 0.6970687508583069, -0.22055411338806152, 0.20945750176906586, -0.4238344728946686, -0.6675084829330444, 0.13962945342063904, 0.3805640637874603, -0.40661272406578064, 1.3010762929916382, 0.2550008296966553, -0.7910570502281189, 0.423127681016922, -0.5751343965530396, -0.12036126852035522, -0.048009809106588364, -0.05286402627825737, -0.790074348449707, -0.271241158246994, 0.1885577291250229, 0.4023013710975647, -0.11321374028921127, -0.1296311616897583, -0.40088221430778503, -0.3517551124095917, 0.3226872682571411, -0.1707635074853897, 1.2040636539459229, -0.008309087716042995, -0.7538676857948303, -0.12386791408061981, 
-1.253201961517334, 0.2968384623527527, 0.1909148097038269, -0.3775016665458679, -0.17332442104816437, -0.4962413012981415, -0.021793607622385025, 0.1873648464679718, 0.27664101123809814, -0.8095527291297913, 0.29077163338661194, -0.3736993670463562, 0.12767162919044495, 1.2648710012435913, 0.046486835926771164, 0.1531219482421875, -0.5635590553283691, 0.5458248853683472, 0.20328038930892944, 0.1748839020729065, 0.3868769109249115, -0.601426362991333, -0.8035814166069031, -0.4725242257118225, -0.06852299720048904, 0.607766330242157, -0.1907530426979065, 1.1345723867416382, 0.08346384018659592, -0.9172203540802002, -0.43164369463920593, -0.11533403396606445, 0.4778241813182831, 0.7976868152618408, 0.5918172001838684, -0.010172327980399132, -0.6139537692070007, -1.085545301437378, -0.318676233291626, -0.18872950971126556, 0.16262124478816986, 0.22793801128864288, 1.028823971748352, -0.25650161504745483, 0.571591317653656, -1.065428376197815, -0.19041238725185394, 0.19910645484924316, -0.06705619394779205, 0.794736921787262, 0.752106249332428, 0.5954267382621765, -0.6513251066207886, -0.5164217352867126, 0.18628591299057007, -0.874525249004364, -0.10412542521953583, 0.12013313919305801, -0.3377281427383423, 0.1321951299905777, 0.16112598776817322, -0.6957642436027527, 0.5561177730560303, 0.22253084182739258, -1.1284812688827515, 1.048707127571106, -0.3269919157028198, 0.5722436308860779, -1.0223183631896973, 0.2209928333759308, -0.05891304463148117, 0.06218164414167404, -0.5053290128707886, 0.048049744218587875, 0.07727047055959702, 0.41618093848228455, -0.4912930428981781, 0.8140447735786438, -0.6821156740188599, -0.05581264942884445, 0.42682895064353943, 0.0997481420636177, -0.10018011182546616, 0.3571798503398895, -0.21800543367862701, 0.7850406169891357, 0.778608500957489, -0.46428051590919495, 0.5130608677864075, 0.4154116213321686, -0.19512948393821716, 0.7057247757911682, -0.4578484296798706, -0.30815643072128296, 0.2895766496658325, -0.0673663467168808, -0.8498333096504211, -0.4993143081665039, 0.014993131160736084, -0.6303781270980835, -0.12441383302211761, 0.3873935639858246, -0.2519187927246094, -0.7860514521598816, -0.9113462567329407, 0.32942837476730347, 0.6894670128822327, -0.44243916869163513, -0.1479959338903427, 0.05202214792370796, 0.10471756756305695, -0.8313387036323547, -0.8140977621078491, -0.47695645689964294, -0.22936035692691803, -0.7153180837631226, 0.3159199655056, -0.2782115340232849, -0.29306739568710327, -0.07316912710666656, -0.2508828938007355, -0.36310404539108276, 0.03675851598381996, 0.1451604962348938, 0.6725049614906311, -0.40049779415130615, -0.3073279559612274, -0.2807677686214447, -0.1669297069311142, 0.24351420998573303, -0.09859602153301239, 0.35944750905036926, -0.4825619161128998, -0.3858809471130371, -0.43666428327560425, -0.042265720665454865, 0.7253490686416626, -0.044304974377155304, 0.7555880546569824, 0.4167945683002472, -0.341492623090744, -0.01013683620840311, -0.276459664106369, -0.29765331745147705, -0.5841180086135864, 0.2473897635936737, -0.5112965703010559, -1.0284377336502075, 0.8117880821228027, 0.5186748504638672, 0.04353063181042671, 1.1516268253326416, 0.5815702080726624, -0.2910435199737549, 1.0209814310073853, 0.017720891162753105, 0.3010038137435913, 0.387800008058548, -0.7224501371383667, 0.09943663328886032, -0.8961897492408752, -0.33412912487983704, -0.6305132508277893, -0.4885357618331909, -0.7029666304588318, -0.051360923796892166, 0.26800408959388733, 0.17959195375442505, -0.6736044883728027, 0.5939762592315674, 
-0.8494166731834412, 0.5974227786064148, 0.5475738644599915, 0.23323501646518707, 0.14239025115966797, -0.1638873666524887, -0.4098975360393524, -0.10694684833288193, -0.4678144156932831, -0.22859004139900208, 1.2089192867279053, 0.2617083787918091, 0.7369957566261292, 0.0910935327410698, 0.901948094367981, 0.061017151921987534, -0.1037110760807991, -0.5962110161781311, 0.6346710324287415, 0.11572721600532532, -0.8078855872154236, -0.4239733815193176, -0.5212003588676453, -1.1218866109848022, 0.3984202444553375, -0.12381980568170547, -0.8275865912437439, 0.1238800585269928, 0.013741493225097656, -0.19122819602489471, 0.4970494210720062, -0.5345304608345032, 0.840930700302124, -0.10373300313949585, -0.45367833971977234, 0.10108226537704468, -0.8178557753562927, 0.4406047463417053, 0.20857582986354828, 0.26440441608428955, 0.03825068473815918, 0.2471393644809723, 1.1863837242126465, -0.8415182828903198, 0.3892320692539215, 0.05338456109166145, 0.03273787349462509, 0.34168922901153564, -0.16889968514442444, 0.47714126110076904, 0.09845531731843948, -0.024706443771719933, -0.11584486067295074, 0.29897168278694153, -0.875752329826355, -0.05641790106892586, 0.956339418888092, -1.0035063028335571, -0.570519745349884, -0.9232336282730103, -0.5021560788154602, 0.08667727559804916, 0.5912209153175354, 0.34823745489120483, 0.5440594553947449, 0.0015098373405635357, 0.45784899592399597, 0.8401908874511719, -0.1281258463859558, 0.625387966632843, 0.2737388610839844, 0.08440501987934113, -0.6940580010414124, 0.8572350740432739, 0.09607740491628647, 0.3518122732639313, 0.28813424706459045, 0.42193397879600525, -0.5341381430625916, -0.20118670165538788, -0.21876782178878784, 0.49523359537124634, -0.640083909034729, -0.2669457793235779, -0.3558753430843353, -0.39297837018966675, -0.7995077967643738, -0.6604695320129395, -0.3037530183792114, -0.542820155620575, -0.5237022042274475, -0.49743807315826416, 0.5715497136116028, 0.4926546812057495, -0.3827388882637024, 0.04748925566673279, -0.4780506193637848, 0.28926971554756165, 0.3496592044830322, 0.5426704287528992, -0.39681726694107056, -0.5718162655830383, 0.04714305326342583, -0.15199171006679535, -0.5796049237251282, -0.9491870999336243, 0.3274928033351898, -0.025545582175254822, 0.5248615145683289, 0.5764965415000916, 0.03274112194776535, 0.8576033711433411, -0.18713383376598358, 1.0668643712997437, 0.29349637031555176, -0.7881824374198914, 0.7439796924591064, -0.35114404559135437, 0.21943439543247223, 0.6732756495475769, 0.18075817823410034, -0.22075705230236053, -0.674706220626831, -1.344104528427124, -0.7940544486045837, 0.6800577044487, 0.4033359885215759, -0.25346317887306213, 0.027755770832300186, 0.16913169622421265, -0.3269798755645752, -0.21376582980155945, -0.6468049883842468, -0.8796377182006836, -0.17231406271457672, -0.49005958437919617, 0.12307468056678772, 0.08798220753669739, -0.37517160177230835, -0.8296796083450317, 0.9926242828369141, -0.009611592628061771, 0.5960972309112549, 0.4823771119117737, 0.09983672201633453, 0.05385294929146767, 0.45808249711990356, 0.9461132884025574, 0.7435780167579651, -0.464494526386261, 0.4200286865234375, 0.3939944803714752, -1.0522432327270508, 0.4853898286819458, 0.33093321323394775, -0.058870621025562286, -0.020526621490716934, 0.45901936292648315, 0.4052762985229492, 0.009774560108780861, -0.19130650162696838, 0.6466355323791504, -0.03947306424379349, -0.5651717185974121, -0.4113464951515198, 0.08972326666116714, -0.12179818004369736, -0.07117199897766113, 0.4103756844997406, -0.13413777947425842, 
-0.04460342600941658, -0.49193674325942993, 0.5099542737007141, 0.3764568567276001, -0.4863012433052063, -0.15857353806495667, 0.7197790741920471, -0.17056910693645477, -0.13385628163814545, 0.3384377062320709, -0.1792953908443451, -0.6198045015335083, 1.1714173555374146, 0.557367742061615, 0.7019450664520264, -0.2802790403366089, -0.08357413113117218, 0.9383729100227356, 0.3904920518398285, -0.03751963749527931, 0.5661981105804443, 0.35206320881843567, -0.2615165710449219, 0.16796231269836426, -0.8711551427841187, -0.056091394275426865, 0.18152236938476562, -0.8271675109863281, 0.3389752507209778, -0.5379567742347717, -0.14998577535152435, 0.03387906774878502, 0.4335227608680725, -0.45594117045402527, 0.5582960844039917, -0.3896312415599823, 1.2514756917953491, -1.0132863521575928, 0.7182067632675171, 0.7678160071372986, -0.5324602127075195, -1.0628972053527832, -0.5395644307136536, 0.040790069848299026, -0.7961003184318542, 0.5694881081581116, -0.015220367349684238, 0.16279993951320648, -0.07129769027233124, -0.7165820598602295, -0.9405472278594971, 1.4117127656936646, -0.06066058203577995, -0.4174126088619232, 0.23542571067810059, -0.05438701808452606, 0.45235738158226013, 0.13038700819015503, 0.6016858816146851, 0.7397843599319458, 0.8214824199676514, -0.07235913723707199, -0.7469767332077026, 0.3247898817062378, -0.520796000957489, -0.33990177512168884, 0.49621203541755676, -0.9504457712173462, 1.1712236404418945, 0.042630262672901154, 0.21304652094841003, -0.1592937856912613, 0.6597176790237427, 0.7695630192756653, 0.3057301342487335, 0.36515340209007263, 0.9657238721847534, 0.8432324528694153, -0.4982122778892517, 0.989546000957489, -0.2479197084903717, 0.8619579672813416, 0.6743190288543701, 0.2330324947834015, 0.7646623849868774, 0.6800391674041748, -0.5381541848182678, 0.5725165605545044, 0.7723271250724792, -0.3011322021484375, 0.3791884481906891, 0.2769228518009186, -0.13731344044208527, -0.12539540231227875, 0.416364848613739, -0.8596658110618591, 0.15222498774528503, 0.07346993684768677, -0.33947059512138367, 0.07248302549123764, -0.46136146783828735, 0.3291573226451874, -0.059018298983573914, -0.023617809638381004, 0.4141089618206024, 0.027705460786819458, -0.47138091921806335, 0.9565479159355164, -0.14157262444496155, 0.7344577312469482, -0.5268187522888184, -0.10358871519565582, -0.3657851815223694, 0.603155255317688, -0.4453672170639038, -1.0325433015823364, 0.19015845656394958, 0.06201598420739174, -0.11184912919998169, -0.19739581644535065, 0.7191982865333557, -0.2048964649438858, -0.7913955450057983, 0.12370774149894714, 0.05667239427566528, 0.1045285314321518, 0.5067673325538635, -0.6734747886657715, -0.33915087580680847, -0.053359031677246094, -0.5622411966323853, 0.12099282443523407, 0.29548850655555725, 0.3155030906200409, 0.5307433605194092, 0.64317786693573, 0.17356516420841217, 0.4097515642642975, -0.5498673319816589, 0.804133415222168, -1.071803331375122, -0.7260640859603882, -0.9128549098968506, 0.4347982406616211, -0.3032713830471039, -0.8911609649658203, 0.9999261498451233, 1.0757790803909302, 0.8831211924552917, 0.01584014482796192, 0.6231588125228882, -0.39388933777809143, 0.25440454483032227, -0.3895292580127716, 0.9223157167434692, -0.8355057835578918, -0.2334943264722824, -0.2595439851284027, -0.7263388633728027, -0.3753672242164612, 0.8367716670036316, -0.12352798879146576, 0.03482010215520859, 1.0767744779586792, 0.6974192261695862, -0.10394760966300964, 0.05323579162359238, -0.06499186903238297, 0.5489793419837952, 0.4170159697532654, 
0.9982391595840454, 0.6377807259559631, -0.8037763833999634, 0.3442389667034149, -0.5262551307678223, -0.4129945933818817, -0.383183091878891, -0.47234082221984863, -0.847451388835907, -0.4892030954360962, -0.21875813603401184, -0.6318342685699463, -0.13756580650806427, 1.009983777999878, 0.49374642968177795, -0.9226205348968506, -0.43670254945755005, -0.14806818962097168, 0.11765642464160919, -0.6192253232002258, -0.4288955330848694, 0.7033674716949463, -0.12392961233854294, -0.5394794940948486, 0.2386390119791031, -0.1731453835964203, 0.2367079257965088, 0.0957811027765274, -0.42034149169921875, -0.7534852027893066, 0.05449536815285683, 0.38758158683776855, 0.33953267335891724, -0.6909915208816528, -0.7391777038574219, 0.32554158568382263, -0.5125750303268433, 0.424262672662735, -0.01819142885506153, -0.5303183197975159, 0.06931989639997482, 0.7348445057868958, 0.5144643187522888, 0.6774460077285767, 0.002467339625582099, 0.06272007524967194, -0.6864619255065918, 0.15384601056575775, -0.007746123243123293, 0.27894923090934753, -0.031695473939180374, -0.3311590850353241, 0.7884470820426941, 0.6944393515586853, -0.56972336769104, -1.0519590377807617, -0.4416060447692871, -1.4439918994903564, -0.051997628062963486, 1.126579999923706, 0.0221781637519598, -0.46579617261886597, 0.2481120526790619, -0.1258046180009842, 0.17849929630756378, -0.34524959325790405, 0.7732808589935303, 0.8471052050590515, -0.39224350452423096, 0.1589728146791458, -0.6214154362678528, 0.37627989053726196, 0.5530979037284851, -1.2318072319030762, -0.07500628381967545, 0.2632879614830017, 0.3115893304347992, 0.36769184470176697, 0.6199623942375183, -0.08530855178833008, 0.2630130648612976, 0.24212610721588135, 0.015255911275744438, 0.0004976208438165486, 0.0005064865108579397, -0.287810355424881, 0.09129921346902847, -0.2458638697862625, -0.443144291639328 ]
open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-wiki30k_r_64_alpha_16
open-llm-leaderboard
2023-08-29T19:47:03Z
201
0
[ "region:us" ]
null
2023-08-29T19:46:06Z
--- pretty_name: Evaluation run of dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16](https://huggingface.co/dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-wiki30k_r_64_alpha_16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T19:45:42.675668](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-wiki30k_r_64_alpha_16/blob/main/results_2023-08-29T19%3A45%3A42.675668.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4674646723979816,\n\ \ \"acc_stderr\": 0.03520803561024559,\n \"acc_norm\": 0.47144963624975206,\n\ \ \"acc_norm_stderr\": 0.03519372000845246,\n \"mc1\": 0.24479804161566707,\n\ \ \"mc1_stderr\": 0.015051869486715014,\n \"mc2\": 0.38637509679052146,\n\ \ \"mc2_stderr\": 0.013509815622124081\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.4948805460750853,\n \"acc_stderr\": 0.01461062489030916,\n\ \ \"acc_norm\": 0.5324232081911263,\n \"acc_norm_stderr\": 0.014580637569995421\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5877315275841466,\n\ \ \"acc_stderr\": 0.004912370023913015,\n \"acc_norm\": 0.7853017327225652,\n\ \ \"acc_norm_stderr\": 0.004097736838432052\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4666666666666667,\n\ \ \"acc_stderr\": 0.043097329010363554,\n \"acc_norm\": 0.4666666666666667,\n\ \ \"acc_norm_stderr\": 0.043097329010363554\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.039777499346220734,\n\ \ \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.039777499346220734\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n\ \ \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \ \ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.4528301886792453,\n \"acc_stderr\": 0.03063562795796182,\n\ \ \"acc_norm\": 0.4528301886792453,\n \"acc_norm_stderr\": 0.03063562795796182\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4513888888888889,\n\ \ \"acc_stderr\": 0.04161402398403279,\n 
\"acc_norm\": 0.4513888888888889,\n\ \ \"acc_norm_stderr\": 0.04161402398403279\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n\ \ \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.41040462427745666,\n\ \ \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.41040462427745666,\n\ \ \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n\ \ \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.41702127659574467,\n \"acc_stderr\": 0.032232762667117124,\n\ \ \"acc_norm\": 0.41702127659574467,\n \"acc_norm_stderr\": 0.032232762667117124\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n\ \ \"acc_stderr\": 0.04266339443159393,\n \"acc_norm\": 0.2894736842105263,\n\ \ \"acc_norm_stderr\": 0.04266339443159393\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.47586206896551725,\n \"acc_stderr\": 0.041618085035015295,\n\ \ \"acc_norm\": 0.47586206896551725,\n \"acc_norm_stderr\": 0.041618085035015295\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708628,\n \"\ acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.022717467897708628\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\ \ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\ \ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621503,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621503\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4967741935483871,\n\ \ \"acc_stderr\": 0.02844341422643833,\n \"acc_norm\": 0.4967741935483871,\n\ \ \"acc_norm_stderr\": 0.02844341422643833\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.3497536945812808,\n \"acc_stderr\": 0.03355400904969566,\n\ \ \"acc_norm\": 0.3497536945812808,\n \"acc_norm_stderr\": 0.03355400904969566\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\"\ : 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.593939393939394,\n \"acc_stderr\": 0.03834816355401181,\n\ \ \"acc_norm\": 0.593939393939394,\n \"acc_norm_stderr\": 0.03834816355401181\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.4898989898989899,\n \"acc_stderr\": 0.03561625488673745,\n \"\ acc_norm\": 0.4898989898989899,\n \"acc_norm_stderr\": 
0.03561625488673745\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7150259067357513,\n \"acc_stderr\": 0.0325771407770966,\n\ \ \"acc_norm\": 0.7150259067357513,\n \"acc_norm_stderr\": 0.0325771407770966\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.43846153846153846,\n \"acc_stderr\": 0.025158266016868564,\n\ \ \"acc_norm\": 0.43846153846153846,\n \"acc_norm_stderr\": 0.025158266016868564\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2814814814814815,\n \"acc_stderr\": 0.027420019350945277,\n \ \ \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.027420019350945277\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.4327731092436975,\n \"acc_stderr\": 0.03218358107742613,\n \ \ \"acc_norm\": 0.4327731092436975,\n \"acc_norm_stderr\": 0.03218358107742613\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.03757949922943342,\n \"acc_norm\"\ : 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943342\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.634862385321101,\n\ \ \"acc_stderr\": 0.02064280145438401,\n \"acc_norm\": 0.634862385321101,\n\ \ \"acc_norm_stderr\": 0.02064280145438401\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.030058202704309846,\n\ \ \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.030058202704309846\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.5637254901960784,\n \"acc_stderr\": 0.034806931384570396,\n \"\ acc_norm\": 0.5637254901960784,\n \"acc_norm_stderr\": 0.034806931384570396\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.6118143459915611,\n \"acc_stderr\": 0.031722950043323296,\n \ \ \"acc_norm\": 0.6118143459915611,\n \"acc_norm_stderr\": 0.031722950043323296\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5605381165919282,\n\ \ \"acc_stderr\": 0.03331092511038179,\n \"acc_norm\": 0.5605381165919282,\n\ \ \"acc_norm_stderr\": 0.03331092511038179\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262972,\n\ \ \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262972\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6446280991735537,\n \"acc_stderr\": 0.0436923632657398,\n \"acc_norm\"\ : 0.6446280991735537,\n \"acc_norm_stderr\": 0.0436923632657398\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5277777777777778,\n\ \ \"acc_stderr\": 0.048262172941398944,\n \"acc_norm\": 0.5277777777777778,\n\ \ \"acc_norm_stderr\": 0.048262172941398944\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.50920245398773,\n \"acc_stderr\": 0.03927705600787443,\n\ \ \"acc_norm\": 0.50920245398773,\n \"acc_norm_stderr\": 0.03927705600787443\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n\ \ \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \ \ \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.5436893203883495,\n \"acc_stderr\": 0.04931801994220416,\n\ \ \"acc_norm\": 0.5436893203883495,\n \"acc_norm_stderr\": 0.04931801994220416\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6837606837606838,\n\ \ \"acc_stderr\": 
0.03046365674734027,\n \"acc_norm\": 0.6837606837606838,\n\ \ \"acc_norm_stderr\": 0.03046365674734027\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \ \ \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6398467432950191,\n\ \ \"acc_stderr\": 0.017166362471369306,\n \"acc_norm\": 0.6398467432950191,\n\ \ \"acc_norm_stderr\": 0.017166362471369306\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.026919095102908273,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.026919095102908273\n \ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\ \ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\ \ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.4803921568627451,\n \"acc_stderr\": 0.028607893699576066,\n\ \ \"acc_norm\": 0.4803921568627451,\n \"acc_norm_stderr\": 0.028607893699576066\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5787781350482315,\n\ \ \"acc_stderr\": 0.028043399858210628,\n \"acc_norm\": 0.5787781350482315,\n\ \ \"acc_norm_stderr\": 0.028043399858210628\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5061728395061729,\n \"acc_stderr\": 0.027818623962583295,\n\ \ \"acc_norm\": 0.5061728395061729,\n \"acc_norm_stderr\": 0.027818623962583295\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.36524822695035464,\n \"acc_stderr\": 0.028723863853281278,\n \ \ \"acc_norm\": 0.36524822695035464,\n \"acc_norm_stderr\": 0.028723863853281278\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.37222946544980445,\n\ \ \"acc_stderr\": 0.01234624129720437,\n \"acc_norm\": 0.37222946544980445,\n\ \ \"acc_norm_stderr\": 0.01234624129720437\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.03033257809455504,\n\ \ \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.03033257809455504\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.45751633986928103,\n \"acc_stderr\": 0.02015468571259089,\n \ \ \"acc_norm\": 0.45751633986928103,\n \"acc_norm_stderr\": 0.02015468571259089\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5272727272727272,\n\ \ \"acc_stderr\": 0.04782001791380061,\n \"acc_norm\": 0.5272727272727272,\n\ \ \"acc_norm_stderr\": 0.04782001791380061\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.49387755102040815,\n \"acc_stderr\": 0.03200682020163908,\n\ \ \"acc_norm\": 0.49387755102040815,\n \"acc_norm_stderr\": 0.03200682020163908\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6417910447761194,\n\ \ \"acc_stderr\": 0.03390393042268813,\n \"acc_norm\": 0.6417910447761194,\n\ \ \"acc_norm_stderr\": 0.03390393042268813\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.40963855421686746,\n\ \ \"acc_stderr\": 0.03828401115079023,\n \"acc_norm\": 0.40963855421686746,\n\ \ \"acc_norm_stderr\": 0.03828401115079023\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7134502923976608,\n \"acc_stderr\": 0.03467826685703826,\n\ \ 
\"acc_norm\": 0.7134502923976608,\n \"acc_norm_stderr\": 0.03467826685703826\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24479804161566707,\n\ \ \"mc1_stderr\": 0.015051869486715014,\n \"mc2\": 0.38637509679052146,\n\ \ \"mc2_stderr\": 0.013509815622124081\n }\n}\n```" repo_url: https://huggingface.co/dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|arc:challenge|25_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hellaswag|10_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:45:42.675668.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:45:42.675668.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:45:42.675668.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:45:42.675668.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T19:45:42.675668.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T19:45:42.675668.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T19_45_42.675668 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T19:45:42.675668.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T19:45:42.675668.parquet' - config_name: results data_files: - split: 2023_08_29T19_45_42.675668 path: - results_2023-08-29T19:45:42.675668.parquet - split: latest path: - results_2023-08-29T19:45:42.675668.parquet --- # Dataset Card for Evaluation run of dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16](https://huggingface.co/dhmeltzer/Llama-2-7b-hf-wiki30k_r_64_alpha_16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
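For example, the aggregated metrics can be pulled straight from that "results" configuration. A minimal sketch, assuming the `results` config and the `latest` split declared in the YAML header above are exposed as-is by the `datasets` library:

```python
from datasets import load_dataset

# Aggregated metrics of the run live in the "results" configuration; the
# "latest" split points at the most recent timestamped run
# (2023-08-29T19:45:42.675668 in the YAML header above).
results = load_dataset(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-wiki30k_r_64_alpha_16",
    "results",
    split="latest",
)
print(results[0])
```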
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-wiki30k_r_64_alpha_16", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T19:45:42.675668](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-wiki30k_r_64_alpha_16/blob/main/results_2023-08-29T19%3A45%3A42.675668.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4674646723979816, "acc_stderr": 0.03520803561024559, "acc_norm": 0.47144963624975206, "acc_norm_stderr": 0.03519372000845246, "mc1": 0.24479804161566707, "mc1_stderr": 0.015051869486715014, "mc2": 0.38637509679052146, "mc2_stderr": 0.013509815622124081 }, "harness|arc:challenge|25": { "acc": 0.4948805460750853, "acc_stderr": 0.01461062489030916, "acc_norm": 0.5324232081911263, "acc_norm_stderr": 0.014580637569995421 }, "harness|hellaswag|10": { "acc": 0.5877315275841466, "acc_stderr": 0.004912370023913015, "acc_norm": 0.7853017327225652, "acc_norm_stderr": 0.004097736838432052 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.039777499346220734, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.039777499346220734 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4528301886792453, "acc_stderr": 0.03063562795796182, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.03063562795796182 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4513888888888889, "acc_stderr": 0.04161402398403279, "acc_norm": 0.4513888888888889, "acc_norm_stderr": 0.04161402398403279 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895537, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895537 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.032232762667117124, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.032232762667117124 }, "harness|hendrycksTest-econometrics|5": { 
"acc": 0.2894736842105263, "acc_stderr": 0.04266339443159393, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159393 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.041618085035015295, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.041618085035015295 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708628, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708628 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621503, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621503 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4967741935483871, "acc_stderr": 0.02844341422643833, "acc_norm": 0.4967741935483871, "acc_norm_stderr": 0.02844341422643833 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3497536945812808, "acc_stderr": 0.03355400904969566, "acc_norm": 0.3497536945812808, "acc_norm_stderr": 0.03355400904969566 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.593939393939394, "acc_stderr": 0.03834816355401181, "acc_norm": 0.593939393939394, "acc_norm_stderr": 0.03834816355401181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.4898989898989899, "acc_stderr": 0.03561625488673745, "acc_norm": 0.4898989898989899, "acc_norm_stderr": 0.03561625488673745 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7150259067357513, "acc_stderr": 0.0325771407770966, "acc_norm": 0.7150259067357513, "acc_norm_stderr": 0.0325771407770966 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.43846153846153846, "acc_stderr": 0.025158266016868564, "acc_norm": 0.43846153846153846, "acc_norm_stderr": 0.025158266016868564 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945277, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945277 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4327731092436975, "acc_stderr": 0.03218358107742613, "acc_norm": 0.4327731092436975, "acc_norm_stderr": 0.03218358107742613 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943342, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943342 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.634862385321101, "acc_stderr": 0.02064280145438401, "acc_norm": 0.634862385321101, "acc_norm_stderr": 0.02064280145438401 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2638888888888889, "acc_stderr": 0.030058202704309846, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.030058202704309846 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5637254901960784, "acc_stderr": 0.034806931384570396, "acc_norm": 0.5637254901960784, "acc_norm_stderr": 0.034806931384570396 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6118143459915611, "acc_stderr": 0.031722950043323296, "acc_norm": 0.6118143459915611, "acc_norm_stderr": 0.031722950043323296 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5605381165919282, 
"acc_stderr": 0.03331092511038179, "acc_norm": 0.5605381165919282, "acc_norm_stderr": 0.03331092511038179 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262972, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262972 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.048262172941398944, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.048262172941398944 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.50920245398773, "acc_stderr": 0.03927705600787443, "acc_norm": 0.50920245398773, "acc_norm_stderr": 0.03927705600787443 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.5436893203883495, "acc_stderr": 0.04931801994220416, "acc_norm": 0.5436893203883495, "acc_norm_stderr": 0.04931801994220416 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6837606837606838, "acc_stderr": 0.03046365674734027, "acc_norm": 0.6837606837606838, "acc_norm_stderr": 0.03046365674734027 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6398467432950191, "acc_stderr": 0.017166362471369306, "acc_norm": 0.6398467432950191, "acc_norm_stderr": 0.017166362471369306 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5, "acc_stderr": 0.026919095102908273, "acc_norm": 0.5, "acc_norm_stderr": 0.026919095102908273 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4803921568627451, "acc_stderr": 0.028607893699576066, "acc_norm": 0.4803921568627451, "acc_norm_stderr": 0.028607893699576066 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5787781350482315, "acc_stderr": 0.028043399858210628, "acc_norm": 0.5787781350482315, "acc_norm_stderr": 0.028043399858210628 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5061728395061729, "acc_stderr": 0.027818623962583295, "acc_norm": 0.5061728395061729, "acc_norm_stderr": 0.027818623962583295 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.36524822695035464, "acc_stderr": 0.028723863853281278, "acc_norm": 0.36524822695035464, "acc_norm_stderr": 0.028723863853281278 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.37222946544980445, "acc_stderr": 0.01234624129720437, "acc_norm": 0.37222946544980445, "acc_norm_stderr": 0.01234624129720437 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5257352941176471, "acc_stderr": 0.03033257809455504, "acc_norm": 0.5257352941176471, "acc_norm_stderr": 0.03033257809455504 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.45751633986928103, "acc_stderr": 0.02015468571259089, "acc_norm": 0.45751633986928103, "acc_norm_stderr": 0.02015468571259089 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.49387755102040815, 
"acc_stderr": 0.03200682020163908, "acc_norm": 0.49387755102040815, "acc_norm_stderr": 0.03200682020163908 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6417910447761194, "acc_stderr": 0.03390393042268813, "acc_norm": 0.6417910447761194, "acc_norm_stderr": 0.03390393042268813 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079023, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079023 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7134502923976608, "acc_stderr": 0.03467826685703826, "acc_norm": 0.7134502923976608, "acc_norm_stderr": 0.03467826685703826 }, "harness|truthfulqa:mc|0": { "mc1": 0.24479804161566707, "mc1_stderr": 0.015051869486715014, "mc2": 0.38637509679052146, "mc2_stderr": 0.013509815622124081 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7346120476722717, -0.8399206399917603, 0.30627989768981934, 0.22859884798526764, -0.17593462765216827, -0.01724482886493206, 0.01931462064385414, -0.2324821501970291, 0.5547397136688232, -0.06125875934958458, -0.49776360392570496, -0.6899271011352539, -0.4589032828807831, 0.2646219730377197, -0.043242137879133224, 0.8470402956008911, -0.21881940960884094, -0.14379742741584778, 0.07225830852985382, -0.029129546135663986, -0.23465995490550995, -0.33661949634552, -0.47622209787368774, -0.39775991439819336, 0.21520370244979858, 0.42888563871383667, 0.4587777554988861, 0.8531802296638489, 0.7177570462226868, 0.299491286277771, -0.338850200176239, -0.02315487153828144, -0.1751461625099182, -0.28737857937812805, 0.40627798438072205, -0.35538041591644287, -0.8580854535102844, 0.26950332522392273, 0.7548578381538391, 0.6080125570297241, -0.11319983005523682, 0.3270922005176544, 0.026586798951029778, 0.6014513373374939, -0.3587576448917389, 0.053389690816402435, -0.26038673520088196, 0.22634336352348328, -0.20461410284042358, -0.2608015537261963, -0.28335127234458923, -0.1900108903646469, -0.12571318447589874, -0.8594416975975037, 0.2498900294303894, 0.3199039697647095, 1.5442523956298828, -0.10588463395833969, -0.24007682502269745, 0.08655217289924622, -0.09467913210391998, 1.0092779397964478, -0.8615297079086304, 0.35345321893692017, 0.8047885298728943, 0.11812914907932281, -0.2031017690896988, -0.587260365486145, -0.6371368169784546, 0.0505678728222847, -0.35957252979278564, 0.3252999484539032, -0.07861732691526413, -0.14330175518989563, 0.3828636705875397, 0.6878742575645447, -0.6443927884101868, 0.16840241849422455, -0.6351284980773926, -0.15079249441623688, 1.0845086574554443, 0.34861329197883606, 0.06018809974193573, -0.386976957321167, -0.68050217628479, -0.672085165977478, -0.4133043885231018, 0.24247635900974274, 0.46948811411857605, 0.3004821538925171, -0.44053971767425537, 0.6854609847068787, -0.41440728306770325, 0.5624071955680847, 0.4195452630519867, -0.02624811790883541, 0.8868830800056458, -0.6513393521308899, -0.5145629048347473, -0.03848514333367348, 1.0870980024337769, 0.5616826415061951, 0.03131144121289253, 0.21940815448760986, -0.007134465966373682, -0.09125885367393494, 0.03306974098086357, -0.8736076951026917, -0.3186895549297333, 0.15832959115505219, -0.39105936884880066, -0.5374855399131775, 0.2979940176010132, -0.8931556344032288, 0.13100725412368774, -0.04729321971535683, 0.40188729763031006, -0.4473716616630554, -0.13061955571174622, 0.21432709693908691, -0.4178536832332611, 0.7922927737236023, -0.1698795109987259, -0.7927790284156799, 0.4008229374885559, 0.5179663300514221, 0.7781055569648743, -0.12690211832523346, -0.4327230155467987, -0.1027236059308052, -0.11007123440504074, -0.2764824330806732, 0.5004485249519348, -0.2747499942779541, -0.4230317771434784, -0.28295353055000305, 0.26856762170791626, -0.21127192676067352, -0.3520021438598633, 0.7207420468330383, -0.2629130780696869, 0.1702602058649063, -0.46956121921539307, -0.6163812279701233, 0.1272331178188324, 0.40879255533218384, -0.41996803879737854, 1.333114504814148, 0.26759544014930725, -0.839562177658081, 0.3863542973995209, -0.5896848440170288, -0.14819768071174622, -0.03975599631667137, -0.05209226533770561, -0.8042371273040771, -0.26729050278663635, 0.16982142627239227, 0.3876325488090515, -0.14946772158145905, -0.10520618408918381, -0.3956899344921112, -0.3805179297924042, 0.34164246916770935, -0.16972647607326508, 1.203138828277588, -0.04396842047572136, -0.7804139256477356, -0.14299003779888153, 
-1.24066162109375, 0.2891329526901245, 0.2270711064338684, -0.4145832061767578, -0.16938067972660065, -0.4761727452278137, -0.03524775803089142, 0.18939828872680664, 0.29156479239463806, -0.7930689454078674, 0.2883646786212921, -0.33347851037979126, 0.15511688590049744, 1.2906849384307861, 0.030612504109740257, 0.13673615455627441, -0.5714895725250244, 0.5281906127929688, 0.21132829785346985, 0.18024693429470062, 0.38354772329330444, -0.6179416179656982, -0.8361615538597107, -0.5014729499816895, -0.031199663877487183, 0.5866867303848267, -0.20907126367092133, 1.1167386770248413, 0.07044804841279984, -0.8729368448257446, -0.464984655380249, -0.09890283644199371, 0.532269299030304, 0.788597047328949, 0.5994831919670105, -0.034687940031290054, -0.6352435946464539, -1.132131576538086, -0.25900158286094666, -0.19822585582733154, 0.15031926333904266, 0.21108706295490265, 1.0663343667984009, -0.27467817068099976, 0.5811027884483337, -1.0533857345581055, -0.18415431678295135, 0.1696660816669464, -0.07396521419286728, 0.7652807235717773, 0.6985689401626587, 0.5823420286178589, -0.6792245507240295, -0.5219065546989441, 0.16614720225334167, -0.8905134201049805, -0.08641280233860016, 0.16292962431907654, -0.3429936468601227, 0.12369689345359802, 0.0805453211069107, -0.7195397019386292, 0.5260561108589172, 0.2521393597126007, -1.0306026935577393, 1.0505436658859253, -0.3152449429035187, 0.5902159214019775, -0.9899168014526367, 0.17057812213897705, -0.02285683900117874, 0.049390409141778946, -0.5132051110267639, 0.018230149522423744, 0.09297280758619308, 0.47963064908981323, -0.4707394242286682, 0.7809267044067383, -0.6830537915229797, -0.0930577889084816, 0.4185030460357666, 0.10557571798563004, -0.10953707993030548, 0.39718228578567505, -0.27372053265571594, 0.8155912756919861, 0.7398227453231812, -0.4754176139831543, 0.5305529832839966, 0.45559945702552795, -0.21901127696037292, 0.694702684879303, -0.5068458318710327, -0.2592398226261139, 0.2876163423061371, -0.04007749259471893, -0.836402416229248, -0.5006035566329956, 0.0859265998005867, -0.5868503451347351, -0.08527855575084686, 0.35747408866882324, -0.30478179454803467, -0.8170574307441711, -0.9357468485832214, 0.3560273349285126, 0.7483180165290833, -0.46118611097335815, -0.2017098218202591, 0.0794873908162117, 0.09923374652862549, -0.8513356447219849, -0.8267560005187988, -0.4705983102321625, -0.23305749893188477, -0.6891269683837891, 0.3701646029949188, -0.27441999316215515, -0.3187328577041626, -0.09258182346820831, -0.23707924783229828, -0.3156907260417938, 0.01756499893963337, 0.15862900018692017, 0.6853904724121094, -0.38522273302078247, -0.28303664922714233, -0.22500361502170563, -0.15788891911506653, 0.23035617172718048, -0.06545325368642807, 0.37712010741233826, -0.4851795732975006, -0.40833812952041626, -0.4334353506565094, -0.04926485940814018, 0.7064493298530579, -0.07762420177459717, 0.717275857925415, 0.42384156584739685, -0.31900715827941895, 0.0152058694511652, -0.29643258452415466, -0.28398793935775757, -0.5889735817909241, 0.27829161286354065, -0.4893732964992523, -1.0248690843582153, 0.7653546929359436, 0.5409991145133972, 0.07875911146402359, 1.1200530529022217, 0.6375322341918945, -0.2937597334384918, 1.0424383878707886, 0.06085270643234253, 0.3350318968296051, 0.37745216488838196, -0.6854749321937561, 0.10258478671312332, -0.948782742023468, -0.30496516823768616, -0.5632752776145935, -0.5077192783355713, -0.7180994749069214, -0.06979735940694809, 0.28738006949424744, 0.15536172688007355, -0.6700427532196045, 
0.5757342576980591, -0.8148207664489746, 0.5514248609542847, 0.5572212338447571, 0.2684081792831421, 0.18231415748596191, -0.11237170547246933, -0.3900381326675415, -0.10647045820951462, -0.437315970659256, -0.2634906470775604, 1.2260736227035522, 0.2864185571670532, 0.7166280746459961, 0.0753188505768776, 0.9219297170639038, 0.12858964502811432, -0.039549849927425385, -0.5876321792602539, 0.6411760449409485, 0.14691856503486633, -0.7964838743209839, -0.40204501152038574, -0.5103545188903809, -1.0540155172348022, 0.4368763566017151, -0.10701720416545868, -0.8386326432228088, 0.09170026332139969, 0.04823453724384308, -0.16776885092258453, 0.48646441102027893, -0.5211973786354065, 0.7983020544052124, -0.14626148343086243, -0.4864143133163452, 0.06693559139966965, -0.8266228437423706, 0.47830522060394287, 0.18482159078121185, 0.2560610771179199, 0.003120542736724019, 0.24648405611515045, 1.220136284828186, -0.7764570116996765, 0.4322512745857239, 0.07582737505435944, 0.027598051354289055, 0.322338730096817, -0.18806368112564087, 0.5010022521018982, 0.09049507230520248, -0.03674381598830223, -0.07139069586992264, 0.27072712779045105, -0.8437400460243225, -0.06250753998756409, 0.938827633857727, -0.981217622756958, -0.6398001313209534, -0.8962517380714417, -0.5370493531227112, 0.08075279742479324, 0.5511462092399597, 0.34523698687553406, 0.4906424582004547, 0.026447296142578125, 0.423591673374176, 0.8151292204856873, -0.0975538045167923, 0.6105327606201172, 0.23394380509853363, 0.13269078731536865, -0.6696308851242065, 0.8006214499473572, 0.06675684452056885, 0.3436339199542999, 0.26153436303138733, 0.3882429301738739, -0.5327149033546448, -0.21147289872169495, -0.2657366693019867, 0.5186916589736938, -0.6476668119430542, -0.27775290608406067, -0.3728998005390167, -0.35294920206069946, -0.7111798524856567, -0.6030499339103699, -0.31697750091552734, -0.5020315647125244, -0.49144861102104187, -0.48838117718696594, 0.5927968621253967, 0.4929117262363434, -0.35777074098587036, 0.07180175185203552, -0.5073144435882568, 0.27352041006088257, 0.30528321862220764, 0.5313305258750916, -0.3409227132797241, -0.5779092907905579, 0.03434927016496658, -0.09448253363370895, -0.5994082093238831, -0.973029613494873, 0.3341831862926483, -0.039761919528245926, 0.5318131446838379, 0.5577483177185059, 0.07996691763401031, 0.8606951236724854, -0.22450466454029083, 1.0306998491287231, 0.35456424951553345, -0.7773505449295044, 0.7057375907897949, -0.31496933102607727, 0.159603089094162, 0.6347534656524658, 0.1931515634059906, -0.18544474244117737, -0.6468744277954102, -1.2855358123779297, -0.772497296333313, 0.6781064867973328, 0.41136693954467773, -0.25868064165115356, 0.05692460760474205, 0.18397469818592072, -0.2950471341609955, -0.13393248617649078, -0.6634166240692139, -0.8951588869094849, -0.13721361756324768, -0.5109671950340271, 0.09110713750123978, 0.016302255913615227, -0.4338950216770172, -0.8276770114898682, 0.9380677938461304, 0.01262794528156519, 0.5778014063835144, 0.4566137194633484, 0.045885514467954636, 0.03403548151254654, 0.4700227379798889, 0.8952454924583435, 0.7635767459869385, -0.44266727566719055, 0.4347022771835327, 0.4107784032821655, -1.0472575426101685, 0.5135591626167297, 0.3055430054664612, -0.06729189306497574, -0.03746781870722771, 0.48054641485214233, 0.44175606966018677, 0.06164490804076195, -0.2179863005876541, 0.6139647364616394, -0.005907742772251368, -0.5688667893409729, -0.3838983476161957, 0.100468710064888, -0.10307668894529343, 0.022998809814453125, 0.3952133059501648, 
-0.1724577397108078, -0.02869437448680401, -0.45466166734695435, 0.48599255084991455, 0.3460468053817749, -0.45852112770080566, -0.19487211108207703, 0.709522008895874, -0.19318360090255737, -0.16149669885635376, 0.3710559010505676, -0.19814980030059814, -0.6347187161445618, 1.1352132558822632, 0.5938319563865662, 0.6840872168540955, -0.23318445682525635, -0.057188186794519424, 0.8989827036857605, 0.355183869600296, -0.06993584334850311, 0.49248459935188293, 0.2728288173675537, -0.24413804709911346, 0.18536631762981415, -0.9088768362998962, -0.05000549927353859, 0.14159123599529266, -0.8466611504554749, 0.31570351123809814, -0.4896465241909027, -0.2149372398853302, 0.025436054915189743, 0.42700523138046265, -0.47406548261642456, 0.5029321908950806, -0.41101354360580444, 1.178054690361023, -0.9725950956344604, 0.7057139277458191, 0.7778070569038391, -0.5124291181564331, -0.9823815822601318, -0.5055932402610779, 0.025728320702910423, -0.8303501605987549, 0.5951526165008545, -0.021765360608696938, 0.17271488904953003, -0.05983976274728775, -0.7023730874061584, -0.9315988421440125, 1.4242949485778809, -0.046098824590444565, -0.4591997265815735, 0.20283019542694092, -0.06516874581575394, 0.46302536129951477, 0.15807673335075378, 0.5571773648262024, 0.7840110063552856, 0.8207756876945496, -0.07300713658332825, -0.7493549585342407, 0.34832748770713806, -0.5104795694351196, -0.30491769313812256, 0.4707092046737671, -0.9138981103897095, 1.1936839818954468, -0.03387538343667984, 0.22156748175621033, -0.1833193004131317, 0.6587164998054504, 0.8440523147583008, 0.28799062967300415, 0.3834807574748993, 0.9268638491630554, 0.8351052403450012, -0.47932881116867065, 1.0284582376480103, -0.21854977309703827, 0.8687670826911926, 0.6516197323799133, 0.20751512050628662, 0.7693322896957397, 0.7083495259284973, -0.5922342538833618, 0.5568104982376099, 0.8232104778289795, -0.33183929324150085, 0.4245748817920685, 0.29059678316116333, -0.13926970958709717, -0.14887838065624237, 0.4170535206794739, -0.8671544194221497, 0.13756835460662842, 0.06927666813135147, -0.34362542629241943, 0.07389306277036667, -0.4606695771217346, 0.30716440081596375, -0.12283822149038315, -0.015915216878056526, 0.3428964614868164, 0.08341952413320541, -0.45902466773986816, 0.9604852795600891, -0.10654620826244354, 0.7539634108543396, -0.5082883238792419, -0.11662229150533676, -0.3546559512615204, 0.591004490852356, -0.4442293345928192, -1.0477741956710815, 0.13791689276695251, 0.09162718057632446, -0.13739849627017975, -0.15148048102855682, 0.6857312917709351, -0.18293385207653046, -0.7471056580543518, 0.14746296405792236, 0.052553534507751465, 0.08459945023059845, 0.5235564112663269, -0.6807000041007996, -0.29491114616394043, -0.05803179740905762, -0.596297025680542, 0.14648060500621796, 0.23638543486595154, 0.27595263719558716, 0.5566707849502563, 0.6512559652328491, 0.133165642619133, 0.3981022238731384, -0.5543972849845886, 0.8191931247711182, -1.0173600912094116, -0.7370686531066895, -0.9562565684318542, 0.4598549008369446, -0.32445842027664185, -0.8608423471450806, 0.9868935942649841, 1.0023151636123657, 0.8832762837409973, 0.011308212764561176, 0.6364687085151672, -0.3757520616054535, 0.2048615664243698, -0.37130972743034363, 0.9185584187507629, -0.8597663640975952, -0.19908323884010315, -0.24706989526748657, -0.7091140747070312, -0.33794036507606506, 0.8401357531547546, -0.15783506631851196, 0.039579905569553375, 1.0452507734298706, 0.6635331511497498, -0.0938606858253479, 0.036148473620414734, -0.07189198583364487, 
0.5542530417442322, 0.3975120484828949, 0.9627220034599304, 0.6554539799690247, -0.780502200126648, 0.3472052216529846, -0.5441153049468994, -0.4382779896259308, -0.408278226852417, -0.4515286386013031, -0.8941478133201599, -0.48352330923080444, -0.23070889711380005, -0.619429349899292, -0.10995817929506302, 0.9944632053375244, 0.46280473470687866, -0.8947809934616089, -0.414233922958374, -0.10021302849054337, 0.15871737897396088, -0.5674167275428772, -0.4024282991886139, 0.7300459742546082, -0.08949153125286102, -0.5441820621490479, 0.1739703118801117, -0.1406835913658142, 0.2285282462835312, 0.07763649523258209, -0.44372352957725525, -0.697024941444397, -0.013693543151021004, 0.43689584732055664, 0.2961413860321045, -0.7094680666923523, -0.7037703394889832, 0.31029778718948364, -0.5240703225135803, 0.4512087106704712, -0.029490133747458458, -0.4875386357307434, 0.025588471442461014, 0.7007096409797668, 0.5039242506027222, 0.6980011463165283, -0.02641868405044079, 0.06552297621965408, -0.6772150993347168, 0.16253778338432312, -0.013710683211684227, 0.2817949652671814, -0.0068778893910348415, -0.33169835805892944, 0.7550793886184692, 0.6614205837249756, -0.5295533537864685, -1.1021631956100464, -0.404928982257843, -1.414289116859436, -0.0017981823766604066, 1.142137050628662, 0.010326452553272247, -0.5000208020210266, 0.2608036696910858, -0.12091704457998276, 0.2244185209274292, -0.32052189111709595, 0.793066680431366, 0.8161176443099976, -0.37869712710380554, 0.10550236701965332, -0.6799082159996033, 0.3811757266521454, 0.5159561634063721, -1.180828332901001, -0.10888633131980896, 0.22911012172698975, 0.31567811965942383, 0.3468196392059326, 0.6320473551750183, -0.10216856002807617, 0.27063265442848206, 0.222651407122612, 0.035277463495731354, -0.030102184042334557, 0.040389642119407654, -0.23221665620803833, 0.06250981986522675, -0.2224128246307373, -0.45378589630126953 ]
open-llm-leaderboard/details_xzuyn__LLaMa-2-LIMA-7B-QLoRA_v2
open-llm-leaderboard
2023-08-29T20:44:42Z
201
0
[ "region:us" ]
null
2023-08-29T20:43:43Z
--- pretty_name: Evaluation run of xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2](https://huggingface.co/xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xzuyn__LLaMa-2-LIMA-7B-QLoRA_v2\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T20:43:19.802214](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-LIMA-7B-QLoRA_v2/blob/main/results_2023-08-29T20%3A43%3A19.802214.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.42772147627937,\n \"acc_stderr\"\ : 0.0352621795234267,\n \"acc_norm\": 0.43164590880132125,\n \"acc_norm_stderr\"\ : 0.03524758282529255,\n \"mc1\": 0.28151774785801714,\n \"mc1_stderr\"\ : 0.01574402724825605,\n \"mc2\": 0.42597374324834275,\n \"mc2_stderr\"\ : 0.015781635062808143\n },\n \"harness|arc:challenge|25\": {\n \"\ acc\": 0.4880546075085324,\n \"acc_stderr\": 0.014607220340597171,\n \ \ \"acc_norm\": 0.5273037542662116,\n \"acc_norm_stderr\": 0.014589589101985994\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6005775741884087,\n\ \ \"acc_stderr\": 0.004887787255353494,\n \"acc_norm\": 0.7928699462258514,\n\ \ \"acc_norm_stderr\": 0.004044213304049376\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n\ \ \"acc_stderr\": 0.04284958639753399,\n \"acc_norm\": 0.43703703703703706,\n\ \ \"acc_norm_stderr\": 0.04284958639753399\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.4276315789473684,\n \"acc_stderr\": 0.04026097083296558,\n\ \ \"acc_norm\": 0.4276315789473684,\n \"acc_norm_stderr\": 0.04026097083296558\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.44,\n\ \ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.41509433962264153,\n \"acc_stderr\": 0.030325945789286105,\n\ \ \"acc_norm\": 0.41509433962264153,\n \"acc_norm_stderr\": 0.030325945789286105\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4583333333333333,\n\ \ \"acc_stderr\": 0.04166666666666665,\n \"acc_norm\": 0.4583333333333333,\n\ \ \"acc_norm_stderr\": 0.04166666666666665\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n \ \ 
},\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n\ \ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3468208092485549,\n\ \ \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.3468208092485549,\n\ \ \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617747,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617747\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n\ \ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.39574468085106385,\n \"acc_stderr\": 0.03196758697835362,\n\ \ \"acc_norm\": 0.39574468085106385,\n \"acc_norm_stderr\": 0.03196758697835362\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n\ \ \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n\ \ \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.43448275862068964,\n \"acc_stderr\": 0.04130740879555497,\n\ \ \"acc_norm\": 0.43448275862068964,\n \"acc_norm_stderr\": 0.04130740879555497\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2724867724867725,\n \"acc_stderr\": 0.02293097307163335,\n \"\ acc_norm\": 0.2724867724867725,\n \"acc_norm_stderr\": 0.02293097307163335\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n\ \ \"acc_stderr\": 0.04104947269903394,\n \"acc_norm\": 0.30158730158730157,\n\ \ \"acc_norm_stderr\": 0.04104947269903394\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.43548387096774194,\n \"acc_stderr\": 0.028206225591502737,\n \"\ acc_norm\": 0.43548387096774194,\n \"acc_norm_stderr\": 0.028206225591502737\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.3645320197044335,\n \"acc_stderr\": 0.033864057460620905,\n \"\ acc_norm\": 0.3645320197044335,\n \"acc_norm_stderr\": 0.033864057460620905\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\"\ : 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.5454545454545454,\n \"acc_stderr\": 0.038881769216741004,\n\ \ \"acc_norm\": 0.5454545454545454,\n \"acc_norm_stderr\": 0.038881769216741004\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.4898989898989899,\n \"acc_stderr\": 0.03561625488673745,\n \"\ acc_norm\": 0.4898989898989899,\n \"acc_norm_stderr\": 0.03561625488673745\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.6424870466321243,\n \"acc_stderr\": 0.03458816042181012,\n\ \ \"acc_norm\": 0.6424870466321243,\n \"acc_norm_stderr\": 0.03458816042181012\n\ \ },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4128205128205128,\n \"acc_stderr\": 0.02496268356433182,\n \ \ \"acc_norm\": 0.4128205128205128,\n \"acc_norm_stderr\": 0.02496268356433182\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712166,\n \ \ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712166\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.3907563025210084,\n \"acc_stderr\": 0.031693802357129965,\n\ \ \"acc_norm\": 0.3907563025210084,\n \"acc_norm_stderr\": 0.031693802357129965\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"\ acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.5559633027522936,\n \"acc_stderr\": 0.021302621211654518,\n \"\ acc_norm\": 0.5559633027522936,\n \"acc_norm_stderr\": 0.021302621211654518\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.21296296296296297,\n \"acc_stderr\": 0.027920963147993656,\n \"\ acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.027920963147993656\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.4852941176470588,\n \"acc_stderr\": 0.03507793834791324,\n \"\ acc_norm\": 0.4852941176470588,\n \"acc_norm_stderr\": 0.03507793834791324\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.5147679324894515,\n \"acc_stderr\": 0.032533028078777386,\n \ \ \"acc_norm\": 0.5147679324894515,\n \"acc_norm_stderr\": 0.032533028078777386\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4618834080717489,\n\ \ \"acc_stderr\": 0.033460150119732274,\n \"acc_norm\": 0.4618834080717489,\n\ \ \"acc_norm_stderr\": 0.033460150119732274\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.46564885496183206,\n \"acc_stderr\": 0.04374928560599738,\n\ \ \"acc_norm\": 0.46564885496183206,\n \"acc_norm_stderr\": 0.04374928560599738\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5206611570247934,\n \"acc_stderr\": 0.04560456086387235,\n \"\ acc_norm\": 0.5206611570247934,\n \"acc_norm_stderr\": 0.04560456086387235\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4537037037037037,\n\ \ \"acc_stderr\": 0.048129173245368216,\n \"acc_norm\": 0.4537037037037037,\n\ \ \"acc_norm_stderr\": 0.048129173245368216\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.39263803680981596,\n \"acc_stderr\": 0.03836740907831029,\n\ \ \"acc_norm\": 0.39263803680981596,\n \"acc_norm_stderr\": 0.03836740907831029\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n\ \ \"acc_stderr\": 0.045218299028335865,\n \"acc_norm\": 0.3482142857142857,\n\ \ \"acc_norm_stderr\": 0.045218299028335865\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.5339805825242718,\n \"acc_stderr\": 0.0493929144727348,\n\ \ \"acc_norm\": 0.5339805825242718,\n \"acc_norm_stderr\": 0.0493929144727348\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.594017094017094,\n\ \ \"acc_stderr\": 0.03217180182641086,\n \"acc_norm\": 0.594017094017094,\n\ \ \"acc_norm_stderr\": 0.03217180182641086\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.43,\n \"acc_stderr\": 
0.04975698519562428,\n \ \ \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5696040868454662,\n\ \ \"acc_stderr\": 0.017705868776292395,\n \"acc_norm\": 0.5696040868454662,\n\ \ \"acc_norm_stderr\": 0.017705868776292395\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.45375722543352603,\n \"acc_stderr\": 0.026803720583206184,\n\ \ \"acc_norm\": 0.45375722543352603,\n \"acc_norm_stderr\": 0.026803720583206184\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27039106145251396,\n\ \ \"acc_stderr\": 0.014854993938010078,\n \"acc_norm\": 0.27039106145251396,\n\ \ \"acc_norm_stderr\": 0.014854993938010078\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.028431095444176643,\n\ \ \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.028431095444176643\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5466237942122186,\n\ \ \"acc_stderr\": 0.02827435985489425,\n \"acc_norm\": 0.5466237942122186,\n\ \ \"acc_norm_stderr\": 0.02827435985489425\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5061728395061729,\n \"acc_stderr\": 0.027818623962583295,\n\ \ \"acc_norm\": 0.5061728395061729,\n \"acc_norm_stderr\": 0.027818623962583295\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.35815602836879434,\n \"acc_stderr\": 0.028602085862759422,\n \ \ \"acc_norm\": 0.35815602836879434,\n \"acc_norm_stderr\": 0.028602085862759422\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.30638852672750977,\n\ \ \"acc_stderr\": 0.011773980329380731,\n \"acc_norm\": 0.30638852672750977,\n\ \ \"acc_norm_stderr\": 0.011773980329380731\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.43014705882352944,\n \"acc_stderr\": 0.030074971917302875,\n\ \ \"acc_norm\": 0.43014705882352944,\n \"acc_norm_stderr\": 0.030074971917302875\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.42320261437908496,\n \"acc_stderr\": 0.019987809769482064,\n \ \ \"acc_norm\": 0.42320261437908496,\n \"acc_norm_stderr\": 0.019987809769482064\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4636363636363636,\n\ \ \"acc_stderr\": 0.047764491623961985,\n \"acc_norm\": 0.4636363636363636,\n\ \ \"acc_norm_stderr\": 0.047764491623961985\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.37142857142857144,\n \"acc_stderr\": 0.03093285879278985,\n\ \ \"acc_norm\": 0.37142857142857144,\n \"acc_norm_stderr\": 0.03093285879278985\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.527363184079602,\n\ \ \"acc_stderr\": 0.03530235517334682,\n \"acc_norm\": 0.527363184079602,\n\ \ \"acc_norm_stderr\": 0.03530235517334682\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3313253012048193,\n\ \ \"acc_stderr\": 0.03664314777288085,\n \"acc_norm\": 0.3313253012048193,\n\ \ \"acc_norm_stderr\": 0.03664314777288085\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.6608187134502924,\n \"acc_stderr\": 0.03631053496488905,\n\ \ \"acc_norm\": 0.6608187134502924,\n \"acc_norm_stderr\": 0.03631053496488905\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28151774785801714,\n\ \ \"mc1_stderr\": 
0.01574402724825605,\n \"mc2\": 0.42597374324834275,\n\ \ \"mc2_stderr\": 0.015781635062808143\n }\n}\n```" repo_url: https://huggingface.co/xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|arc:challenge|25_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hellaswag|10_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:43:19.802214.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:43:19.802214.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:43:19.802214.parquet' - 
'**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:43:19.802214.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:43:19.802214.parquet' - config_name: 
harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:43:19.802214.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T20_43_19.802214 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T20:43:19.802214.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T20:43:19.802214.parquet' - config_name: results data_files: - split: 2023_08_29T20_43_19.802214 path: - results_2023-08-29T20:43:19.802214.parquet - split: latest path: - results_2023-08-29T20:43:19.802214.parquet --- # Dataset Card for Evaluation run of xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2](https://huggingface.co/xzuyn/LLaMa-2-LIMA-7B-QLoRA_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xzuyn__LLaMa-2-LIMA-7B-QLoRA_v2", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T20:43:19.802214](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-LIMA-7B-QLoRA_v2/blob/main/results_2023-08-29T20%3A43%3A19.802214.json): ```python { "all": { "acc": 0.42772147627937, "acc_stderr": 0.0352621795234267, "acc_norm": 0.43164590880132125, "acc_norm_stderr": 0.03524758282529255, "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.42597374324834275, "mc2_stderr": 0.015781635062808143 }, "harness|arc:challenge|25": { "acc": 0.4880546075085324, "acc_stderr": 0.014607220340597171, "acc_norm": 0.5273037542662116, "acc_norm_stderr": 0.014589589101985994 }, "harness|hellaswag|10": { "acc": 0.6005775741884087, "acc_stderr": 0.004887787255353494, "acc_norm": 0.7928699462258514, "acc_norm_stderr": 0.004044213304049376 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4276315789473684, "acc_stderr": 0.04026097083296558, "acc_norm": 0.4276315789473684, "acc_norm_stderr": 0.04026097083296558 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.41509433962264153, "acc_stderr": 0.030325945789286105, "acc_norm": 0.41509433962264153, "acc_norm_stderr": 0.030325945789286105 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4583333333333333, "acc_stderr": 0.04166666666666665, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.04166666666666665 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3468208092485549, "acc_stderr": 0.036291466701596636, "acc_norm": 0.3468208092485549, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.03196758697835362, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.03196758697835362 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 
0.43448275862068964, "acc_stderr": 0.04130740879555497, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2724867724867725, "acc_stderr": 0.02293097307163335, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.02293097307163335 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.43548387096774194, "acc_stderr": 0.028206225591502737, "acc_norm": 0.43548387096774194, "acc_norm_stderr": 0.028206225591502737 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.033864057460620905, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.033864057460620905 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5454545454545454, "acc_stderr": 0.038881769216741004, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.038881769216741004 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.4898989898989899, "acc_stderr": 0.03561625488673745, "acc_norm": 0.4898989898989899, "acc_norm_stderr": 0.03561625488673745 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6424870466321243, "acc_stderr": 0.03458816042181012, "acc_norm": 0.6424870466321243, "acc_norm_stderr": 0.03458816042181012 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4128205128205128, "acc_stderr": 0.02496268356433182, "acc_norm": 0.4128205128205128, "acc_norm_stderr": 0.02496268356433182 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3907563025210084, "acc_stderr": 0.031693802357129965, "acc_norm": 0.3907563025210084, "acc_norm_stderr": 0.031693802357129965 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5559633027522936, "acc_stderr": 0.021302621211654518, "acc_norm": 0.5559633027522936, "acc_norm_stderr": 0.021302621211654518 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.21296296296296297, "acc_stderr": 0.027920963147993656, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.027920963147993656 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.4852941176470588, "acc_stderr": 0.03507793834791324, "acc_norm": 0.4852941176470588, "acc_norm_stderr": 0.03507793834791324 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5147679324894515, "acc_stderr": 0.032533028078777386, "acc_norm": 0.5147679324894515, "acc_norm_stderr": 0.032533028078777386 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.4618834080717489, "acc_stderr": 0.033460150119732274, "acc_norm": 0.4618834080717489, "acc_norm_stderr": 0.033460150119732274 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 
0.46564885496183206, "acc_stderr": 0.04374928560599738, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.04374928560599738 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5206611570247934, "acc_stderr": 0.04560456086387235, "acc_norm": 0.5206611570247934, "acc_norm_stderr": 0.04560456086387235 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4537037037037037, "acc_stderr": 0.048129173245368216, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.048129173245368216 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.39263803680981596, "acc_stderr": 0.03836740907831029, "acc_norm": 0.39263803680981596, "acc_norm_stderr": 0.03836740907831029 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.045218299028335865, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.045218299028335865 }, "harness|hendrycksTest-management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.0493929144727348, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.0493929144727348 }, "harness|hendrycksTest-marketing|5": { "acc": 0.594017094017094, "acc_stderr": 0.03217180182641086, "acc_norm": 0.594017094017094, "acc_norm_stderr": 0.03217180182641086 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5696040868454662, "acc_stderr": 0.017705868776292395, "acc_norm": 0.5696040868454662, "acc_norm_stderr": 0.017705868776292395 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.45375722543352603, "acc_stderr": 0.026803720583206184, "acc_norm": 0.45375722543352603, "acc_norm_stderr": 0.026803720583206184 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27039106145251396, "acc_stderr": 0.014854993938010078, "acc_norm": 0.27039106145251396, "acc_norm_stderr": 0.014854993938010078 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4411764705882353, "acc_stderr": 0.028431095444176643, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.028431095444176643 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5466237942122186, "acc_stderr": 0.02827435985489425, "acc_norm": 0.5466237942122186, "acc_norm_stderr": 0.02827435985489425 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5061728395061729, "acc_stderr": 0.027818623962583295, "acc_norm": 0.5061728395061729, "acc_norm_stderr": 0.027818623962583295 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.35815602836879434, "acc_stderr": 0.028602085862759422, "acc_norm": 0.35815602836879434, "acc_norm_stderr": 0.028602085862759422 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.30638852672750977, "acc_stderr": 0.011773980329380731, "acc_norm": 0.30638852672750977, "acc_norm_stderr": 0.011773980329380731 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.43014705882352944, "acc_stderr": 0.030074971917302875, "acc_norm": 0.43014705882352944, "acc_norm_stderr": 0.030074971917302875 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.42320261437908496, "acc_stderr": 0.019987809769482064, "acc_norm": 0.42320261437908496, "acc_norm_stderr": 0.019987809769482064 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4636363636363636, "acc_stderr": 0.047764491623961985, "acc_norm": 0.4636363636363636, "acc_norm_stderr": 0.047764491623961985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.37142857142857144, "acc_stderr": 0.03093285879278985, "acc_norm": 0.37142857142857144, "acc_norm_stderr": 
0.03093285879278985 }, "harness|hendrycksTest-sociology|5": { "acc": 0.527363184079602, "acc_stderr": 0.03530235517334682, "acc_norm": 0.527363184079602, "acc_norm_stderr": 0.03530235517334682 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.03664314777288085, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288085 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6608187134502924, "acc_stderr": 0.03631053496488905, "acc_norm": 0.6608187134502924, "acc_norm_stderr": 0.03631053496488905 }, "harness|truthfulqa:mc|0": { "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.42597374324834275, "mc2_stderr": 0.015781635062808143 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.6846113801002502, -0.8057687878608704, 0.291756808757782, 0.22535119950771332, -0.20404908061027527, -0.04556041583418846, 0.05223940312862396, -0.268189013004303, 0.6012154817581177, -0.09542898088693619, -0.5192674994468689, -0.7090638875961304, -0.4449164867401123, 0.18104656040668488, -0.030671145766973495, 0.8158626556396484, -0.17244863510131836, -0.13339772820472717, 0.09919451922178268, -0.026741979643702507, -0.2664170563220978, -0.3909342288970947, -0.5489146709442139, -0.3252042829990387, 0.17696350812911987, 0.42561545968055725, 0.4864335358142853, 0.8391343355178833, 0.6612828373908997, 0.30036503076553345, -0.3496062159538269, -0.036266520619392395, -0.22455312311649323, -0.29608139395713806, 0.38942670822143555, -0.4088265895843506, -0.8745700120925903, 0.3100666403770447, 0.7998633980751038, 0.6501032114028931, -0.058988917618989944, 0.324222594499588, 0.027204308658838272, 0.5459665656089783, -0.2959025800228119, 0.06783925741910934, -0.3115881383419037, 0.28883522748947144, -0.22219881415367126, -0.2923927307128906, -0.24239404499530792, -0.26490989327430725, -0.08122306317090988, -0.9279614090919495, 0.258798211812973, 0.3165712058544159, 1.5829874277114868, -0.17754705250263214, -0.24190044403076172, 0.09488159418106079, -0.07062892615795135, 1.0463589429855347, -0.8318134546279907, 0.3426021337509155, 0.779353678226471, 0.14723941683769226, -0.17020420730113983, -0.603485107421875, -0.679419994354248, 0.12291920185089111, -0.3591117560863495, 0.3457082509994507, -0.06399659812450409, -0.2205708771944046, 0.34916165471076965, 0.6706792116165161, -0.6438876986503601, 0.17983809113502502, -0.6354567408561707, -0.1146416887640953, 1.060716152191162, 0.4015520215034485, 0.059167295694351196, -0.3320212960243225, -0.7052398920059204, -0.6673399209976196, -0.4076877534389496, 0.27931222319602966, 0.4373544156551361, 0.3950199484825134, -0.42305395007133484, 0.6384921669960022, -0.4185957610607147, 0.5215206742286682, 0.3907475471496582, 0.01149066723883152, 0.9295533299446106, -0.6790145635604858, -0.5513932108879089, -0.06452552229166031, 1.1249568462371826, 0.6139557361602783, 0.044255293905735016, 0.21278157830238342, 0.07738965004682541, -0.10446496307849884, -0.04569025710225105, -0.8633031249046326, -0.29931211471557617, 0.1844150274991989, -0.39233753085136414, -0.551027238368988, 0.34751030802726746, -0.8757483959197998, 0.13973070681095123, 0.0023669861257076263, 0.40944433212280273, -0.4227876365184784, -0.12213817983865738, 0.29239583015441895, -0.3795509338378906, 0.8330844640731812, -0.16101747751235962, -0.8327500224113464, 0.39604780077934265, 0.5043551921844482, 0.7900456786155701, -0.07828119397163391, -0.4061521291732788, -0.06016566976904869, -0.11027275770902634, -0.25997194647789, 0.5444965958595276, -0.23487895727157593, -0.4715942442417145, -0.27788788080215454, 0.3016672730445862, -0.24080811440944672, -0.3278366029262543, 0.7686955332756042, -0.22087912261486053, 0.20790117979049683, -0.4831083118915558, -0.6343669295310974, 0.1455218642950058, 0.39943698048591614, -0.42966005206108093, 1.2936142683029175, 0.26131170988082886, -0.824942409992218, 0.4700140655040741, -0.539074718952179, -0.1362866312265396, -0.03538521006703377, -0.07489849627017975, -0.8562443852424622, -0.31653162837028503, 0.17911584675312042, 0.38209396600723267, -0.19431865215301514, -0.16553324460983276, -0.3984347879886627, -0.3570837378501892, 0.35771721601486206, -0.18180707097053528, 1.283754587173462, -0.07801297307014465, -0.7709435224533081, -0.12067894637584686, 
-1.2828720808029175, 0.3712606430053711, 0.2678341865539551, -0.39337173104286194, -0.1362609565258026, -0.48373886942863464, -0.017184080556035042, 0.2259550541639328, 0.26609042286872864, -0.7855080962181091, 0.2617829740047455, -0.35323676466941833, 0.17108164727687836, 1.285968542098999, -0.0036171115934848785, 0.13485020399093628, -0.5629352927207947, 0.49933648109436035, 0.21528774499893188, 0.19135424494743347, 0.42067480087280273, -0.6367316842079163, -0.8090083599090576, -0.4720497727394104, -0.060321517288684845, 0.6125355362892151, -0.15762785077095032, 1.1558434963226318, 0.04093350097537041, -0.9309568405151367, -0.46350035071372986, -0.12699510157108307, 0.4593372941017151, 0.8402596712112427, 0.5852669477462769, -0.012143423780798912, -0.6577739119529724, -1.0296186208724976, -0.22927582263946533, -0.16579297184944153, 0.14728480577468872, 0.2150578647851944, 0.9861516356468201, -0.22018690407276154, 0.6058568358421326, -1.019964337348938, -0.23224957287311554, 0.1838192343711853, -0.12844593822956085, 0.8104991316795349, 0.7729851007461548, 0.6542584896087646, -0.6540559530258179, -0.5340056419372559, 0.18121647834777832, -0.9439623951911926, -0.07188741117715836, 0.10419865697622299, -0.3715898394584656, 0.04243965074419975, 0.09073995798826218, -0.7077043652534485, 0.5563969016075134, 0.2508859932422638, -1.1345552206039429, 1.0423485040664673, -0.31764206290245056, 0.5463254451751709, -1.0016603469848633, 0.17902030050754547, -0.09528347849845886, 0.022485636174678802, -0.50548255443573, 0.048000745475292206, 0.06227670982480049, 0.47100701928138733, -0.489245742559433, 0.7949122190475464, -0.686307430267334, -0.0603485107421875, 0.44184860587120056, 0.18340113759040833, -0.0810190811753273, 0.37117084860801697, -0.18579573929309845, 0.7695462703704834, 0.7792984843254089, -0.4551181197166443, 0.5468337535858154, 0.4583783745765686, -0.25378474593162537, 0.7478264570236206, -0.5157076716423035, -0.3094997704029083, 0.3007250428199768, -0.04841955378651619, -0.8711854815483093, -0.4174468219280243, 0.08276181668043137, -0.5958639979362488, -0.1473112851381302, 0.3862607777118683, -0.2657996714115143, -0.80397629737854, -0.9841999411582947, 0.36779308319091797, 0.6926723718643188, -0.46861347556114197, -0.23818618059158325, 0.04700053110718727, 0.11155524104833603, -0.7967317700386047, -0.8689067959785461, -0.4697866439819336, -0.22534991800785065, -0.715798556804657, 0.32159557938575745, -0.2871665060520172, -0.2670253813266754, -0.11212407797574997, -0.19077572226524353, -0.28839775919914246, 0.029076360166072845, 0.14232872426509857, 0.6751437187194824, -0.4375459849834442, -0.29409417510032654, -0.22060024738311768, -0.19529151916503906, 0.24947762489318848, -0.1379823237657547, 0.4006381332874298, -0.4206539988517761, -0.38122794032096863, -0.4888787269592285, -0.0043115695007145405, 0.681481122970581, -0.035602033138275146, 0.7621702551841736, 0.42476093769073486, -0.30525603890419006, 0.03250617906451225, -0.21375566720962524, -0.2592715322971344, -0.5864667296409607, 0.27802157402038574, -0.5303074717521667, -1.0664221048355103, 0.8071256279945374, 0.5632199645042419, 0.07635688781738281, 1.1180435419082642, 0.6220709085464478, -0.2974908947944641, 1.055179476737976, 0.03315506875514984, 0.3093181252479553, 0.3862159550189972, -0.7144487500190735, 0.0863851010799408, -0.9262433052062988, -0.357333779335022, -0.572898805141449, -0.4880102574825287, -0.7146927714347839, -0.08170761913061142, 0.25669437646865845, 0.15370768308639526, -0.7068334817886353, 
0.5522710084915161, -0.8207271099090576, 0.5535820722579956, 0.569293737411499, 0.28156086802482605, 0.1701568365097046, -0.1327374428510666, -0.3830353617668152, -0.16816459596157074, -0.44854065775871277, -0.2473585158586502, 1.2364274263381958, 0.30528154969215393, 0.7076402306556702, 0.1284022033214569, 0.8733476996421814, 0.07740069925785065, -0.10378856211900711, -0.5735418796539307, 0.6217511296272278, 0.17975181341171265, -0.7913732528686523, -0.4037516117095947, -0.49966996908187866, -1.0906448364257812, 0.4696177542209625, -0.19729571044445038, -0.8702068328857422, 0.17616550624370575, 0.025892315432429314, -0.23345665633678436, 0.5314099788665771, -0.48747044801712036, 0.8213449120521545, -0.13809502124786377, -0.5053341388702393, 0.1423337757587433, -0.8305131196975708, 0.4438832700252533, 0.21017786860466003, 0.2413107454776764, 0.014136629179120064, 0.26626816391944885, 1.1777021884918213, -0.8758976459503174, 0.4582548141479492, 0.06489589065313339, -0.012246411293745041, 0.3471736013889313, -0.18204490840435028, 0.5153244137763977, 0.1334233283996582, -0.052735522389411926, -0.10680428892374039, 0.27101942896842957, -0.9021022319793701, -0.0653335303068161, 0.875146746635437, -1.0224545001983643, -0.6118093729019165, -0.9034534692764282, -0.5110036134719849, 0.07694477587938309, 0.5721815824508667, 0.4013766348361969, 0.4795953631401062, -0.010014883242547512, 0.42432454228401184, 0.8663816452026367, -0.09061501175165176, 0.5628584623336792, 0.19520415365695953, 0.09271344542503357, -0.6986957788467407, 0.8434439897537231, 0.09068146347999573, 0.36720624566078186, 0.2817746102809906, 0.3902876377105713, -0.5618107318878174, -0.23057855665683746, -0.2310246229171753, 0.5065953135490417, -0.6344588994979858, -0.29689353704452515, -0.38381513953208923, -0.3847138583660126, -0.7879384756088257, -0.6482478976249695, -0.2661850154399872, -0.5532976388931274, -0.5600671172142029, -0.5452990531921387, 0.5938999652862549, 0.42432525753974915, -0.39174073934555054, 0.13416384160518646, -0.46386224031448364, 0.23333993554115295, 0.36517301201820374, 0.5316604971885681, -0.3438693583011627, -0.6023391485214233, 0.05779826268553734, -0.11634013056755066, -0.5176373720169067, -0.940585196018219, 0.3125388026237488, -0.04088862985372543, 0.4972843527793884, 0.6545383334159851, 0.02519741654396057, 0.8477689027786255, -0.17641662061214447, 1.011545181274414, 0.33375948667526245, -0.7991433143615723, 0.7310910224914551, -0.33222657442092896, 0.1385805457830429, 0.6561433672904968, 0.15270473062992096, -0.21520154178142548, -0.6407338380813599, -1.2778866291046143, -0.7909992933273315, 0.6678335070610046, 0.39127546548843384, -0.221613347530365, 0.027875220403075218, 0.16631311178207397, -0.31693848967552185, -0.16722790896892548, -0.70023512840271, -0.9073214530944824, -0.16441316902637482, -0.47883307933807373, 0.13912437856197357, -0.00768657959997654, -0.3737746477127075, -0.8294774889945984, 0.8937222361564636, 0.028707999736070633, 0.5778694152832031, 0.44747376441955566, 0.08359890431165695, 0.07618318498134613, 0.4070204198360443, 0.9587813019752502, 0.6927387714385986, -0.46798890829086304, 0.3879817724227905, 0.40789103507995605, -1.0676629543304443, 0.5182018280029297, 0.28598371148109436, -0.09361916780471802, -0.03951020538806915, 0.4593167304992676, 0.42681369185447693, 0.04240568354725838, -0.1861191689968109, 0.6087323427200317, -0.047382477670907974, -0.5859045386314392, -0.4123908579349518, 0.07871728390455246, -0.1153598204255104, -0.00544019415974617, 0.3600058853626251, 
-0.2030593454837799, -0.05151553079485893, -0.513661801815033, 0.46581441164016724, 0.35207444429397583, -0.4787617325782776, -0.1777241975069046, 0.781374990940094, -0.21066084504127502, -0.08000203967094421, 0.3390471339225769, -0.20754098892211914, -0.6170922517776489, 1.1164110898971558, 0.6594215035438538, 0.6382017731666565, -0.26422974467277527, -0.08070766180753708, 0.9377521872520447, 0.3612504303455353, -0.016244640573859215, 0.5725271701812744, 0.2847224473953247, -0.30570998787879944, 0.26710212230682373, -0.8513181209564209, -0.027699897065758705, 0.0984942838549614, -0.8335137963294983, 0.2981071174144745, -0.6061162352561951, -0.23637773096561432, 0.005432082340121269, 0.47437480092048645, -0.44886279106140137, 0.5770879983901978, -0.3943609297275543, 1.2223610877990723, -0.9872875809669495, 0.7263548970222473, 0.6938419342041016, -0.5498437881469727, -1.0155072212219238, -0.596903383731842, 0.02034926600754261, -0.8649469017982483, 0.5762925148010254, -0.09881600737571716, 0.1609736829996109, -0.07214385271072388, -0.7113973498344421, -0.948785662651062, 1.4578731060028076, -0.08290550112724304, -0.45617440342903137, 0.2478073239326477, -0.008060568943619728, 0.4532279670238495, 0.13998927175998688, 0.5913797616958618, 0.78493332862854, 0.8080475926399231, -0.055689334869384766, -0.6930137276649475, 0.3622039258480072, -0.5176135897636414, -0.3198508322238922, 0.4140918552875519, -0.9886273145675659, 1.2472130060195923, -0.03047899156808853, 0.1820880025625229, -0.1340312957763672, 0.7241738438606262, 0.8175721168518066, 0.24524493515491486, 0.35094723105430603, 0.9259321093559265, 0.9262027740478516, -0.49382197856903076, 0.9484549164772034, -0.17661117017269135, 0.8539978861808777, 0.7235456109046936, 0.1781119704246521, 0.7768491506576538, 0.6524420380592346, -0.5734371542930603, 0.5789700150489807, 0.8175603151321411, -0.3231929838657379, 0.3848841190338135, 0.2757725715637207, -0.1792161464691162, -0.11630329489707947, 0.44699570536613464, -0.8858949542045593, 0.14286179840564728, 0.11928912997245789, -0.35916081070899963, 0.08685650676488876, -0.4771905839443207, 0.3935689628124237, -0.08005502074956894, -0.04143965244293213, 0.33272814750671387, 0.03794129192829132, -0.33882904052734375, 0.922559380531311, -0.13240529596805573, 0.7869710922241211, -0.4979758560657501, -0.07653909921646118, -0.38632091879844666, 0.586686372756958, -0.4424777030944824, -1.0717933177947998, 0.1208890825510025, 0.0504714660346508, -0.1433952897787094, -0.08973424136638641, 0.6516689658164978, -0.21292497217655182, -0.804528534412384, 0.13408583402633667, 0.039321478456258774, 0.10318522900342941, 0.5472939610481262, -0.7128860354423523, -0.30136317014694214, -0.051475878804922104, -0.5362783670425415, 0.12113068997859955, 0.3079852759838104, 0.25021833181381226, 0.5597670674324036, 0.6672177314758301, 0.18895766139030457, 0.440388947725296, -0.5227762460708618, 0.8067892789840698, -1.066395878791809, -0.7437744736671448, -0.9249898791313171, 0.42218756675720215, -0.35781121253967285, -0.8919404745101929, 1.0113985538482666, 1.0156232118606567, 0.9010436534881592, 0.05584261938929558, 0.6579896807670593, -0.3390296399593353, 0.2591155767440796, -0.3900053799152374, 0.9778825640678406, -0.8071884512901306, -0.2523364722728729, -0.2662866711616516, -0.6775107383728027, -0.44050833582878113, 0.875787079334259, -0.15798600018024445, 0.07039403915405273, 1.0130245685577393, 0.6542239189147949, -0.07240220904350281, 0.011462450958788395, -0.031014353036880493, 0.5482910871505737, 
0.4171680808067322, 1.0303208827972412, 0.6229141354560852, -0.8544289469718933, 0.34952765703201294, -0.49690645933151245, -0.4279344975948334, -0.4157353937625885, -0.45996686816215515, -0.8782615661621094, -0.4565512537956238, -0.1860986053943634, -0.6239228248596191, -0.151654452085495, 1.000653862953186, 0.46449679136276245, -0.9096874594688416, -0.4344431161880493, -0.06329137831926346, 0.1694563329219818, -0.5787253975868225, -0.41332224011421204, 0.7901704907417297, -0.08706793189048767, -0.5757177472114563, 0.14823530614376068, -0.15896660089492798, 0.299908310174942, 0.11161312460899353, -0.4004322588443756, -0.7080354690551758, 0.002628074027597904, 0.46742480993270874, 0.3665372431278229, -0.6398881673812866, -0.7104294300079346, 0.2821415066719055, -0.5396388173103333, 0.4731380045413971, -0.03923186659812927, -0.5204077363014221, 0.06005442142486572, 0.7173386216163635, 0.44785529375076294, 0.6415160894393921, -0.03487055376172066, 0.03840809315443039, -0.6376317739486694, 0.2552318274974823, -0.05219925940036774, 0.2991352081298828, -0.03768960386514664, -0.33747878670692444, 0.7405362129211426, 0.6856064200401306, -0.49250903725624084, -1.0751882791519165, -0.466656357049942, -1.452659010887146, 0.01495447289198637, 1.0433580875396729, -0.027750734239816666, -0.5270188450813293, 0.23157045245170593, -0.12866739928722382, 0.16169999539852142, -0.28497064113616943, 0.7685853838920593, 0.7334596514701843, -0.3457556962966919, 0.11933554708957672, -0.6129539012908936, 0.38094425201416016, 0.5097490549087524, -1.1986751556396484, -0.09382785856723785, 0.17577748000621796, 0.3242475390434265, 0.329913467168808, 0.6920790076255798, -0.13748785853385925, 0.28410518169403076, 0.301973819732666, 0.0333852618932724, 0.04930717870593071, 0.08456722646951675, -0.2330961674451828, 0.053388115018606186, -0.21088284254074097, -0.47659775614738464 ]
open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-alpaca-test
open-llm-leaderboard
2023-08-29T20:51:09Z
201
0
[ "region:us" ]
null
2023-08-29T20:50:11Z
--- pretty_name: Evaluation run of CHIH-HUNG/llama-2-13b-alpaca-test dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [CHIH-HUNG/llama-2-13b-alpaca-test](https://huggingface.co/CHIH-HUNG/llama-2-13b-alpaca-test)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-alpaca-test\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-29T20:49:48.067362](https://huggingface.co/datasets/open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-alpaca-test/blob/main/results_2023-08-29T20%3A49%3A48.067362.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5568910569830451,\n\ \ \"acc_stderr\": 0.03436225133323378,\n \"acc_norm\": 0.5610380147243772,\n\ \ \"acc_norm_stderr\": 0.034342335699213765,\n \"mc1\": 0.2607099143206854,\n\ \ \"mc1_stderr\": 0.015368841620766372,\n \"mc2\": 0.3693612523342933,\n\ \ \"mc2_stderr\": 0.014364347604420232\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5571672354948806,\n \"acc_stderr\": 0.014515573873348899,\n\ \ \"acc_norm\": 0.6006825938566553,\n \"acc_norm_stderr\": 0.014312094557946704\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6117307309300936,\n\ \ \"acc_stderr\": 0.004863603638367449,\n \"acc_norm\": 0.8128858793069109,\n\ \ \"acc_norm_stderr\": 0.003892060546588329\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45925925925925926,\n\ \ \"acc_stderr\": 0.04304979692464242,\n \"acc_norm\": 0.45925925925925926,\n\ \ \"acc_norm_stderr\": 0.04304979692464242\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.040335656678483184,\n\ \ \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.040335656678483184\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\ \ \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \ \ \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6150943396226415,\n \"acc_stderr\": 0.02994649856769995,\n\ \ \"acc_norm\": 0.6150943396226415,\n \"acc_norm_stderr\": 0.02994649856769995\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5625,\n\ \ \"acc_stderr\": 0.04148415739394154,\n \"acc_norm\": 0.5625,\n \ \ \"acc_norm_stderr\": 0.04148415739394154\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n\ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5260115606936416,\n\ \ \"acc_stderr\": 0.03807301726504511,\n \"acc_norm\": 0.5260115606936416,\n\ \ \"acc_norm_stderr\": 0.03807301726504511\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.04389869956808778,\n\ \ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.04389869956808778\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4340425531914894,\n \"acc_stderr\": 0.032400380867927465,\n\ \ \"acc_norm\": 0.4340425531914894,\n \"acc_norm_stderr\": 0.032400380867927465\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n\ \ \"acc_stderr\": 0.04462917535336936,\n \"acc_norm\": 0.34210526315789475,\n\ \ \"acc_norm_stderr\": 0.04462917535336936\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n\ \ \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3306878306878307,\n \"acc_stderr\": 0.024229965298425072,\n \"\ acc_norm\": 0.3306878306878307,\n \"acc_norm_stderr\": 0.024229965298425072\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n\ \ \"acc_stderr\": 0.04240799327574924,\n \"acc_norm\": 0.3412698412698413,\n\ \ \"acc_norm_stderr\": 0.04240799327574924\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6580645161290323,\n\ \ \"acc_stderr\": 0.026985289576552742,\n \"acc_norm\": 0.6580645161290323,\n\ \ \"acc_norm_stderr\": 0.026985289576552742\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n\ \ \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\"\ : 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.03681050869161551,\n\ \ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.03681050869161551\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.702020202020202,\n \"acc_stderr\": 0.03258630383836557,\n \"acc_norm\"\ : 0.702020202020202,\n \"acc_norm_stderr\": 0.03258630383836557\n },\n\ \ \"harness|hendrycksTest-high_school_government_and_politics|5\": 
{\n \ \ \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.02749350424454806,\n\ \ \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.02749350424454806\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5153846153846153,\n \"acc_stderr\": 0.025339003010106522,\n\ \ \"acc_norm\": 0.5153846153846153,\n \"acc_norm_stderr\": 0.025339003010106522\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \ \ \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.032145368597886394,\n\ \ \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.032145368597886394\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526733,\n \"\ acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526733\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7376146788990826,\n \"acc_stderr\": 0.018861885021534738,\n \"\ acc_norm\": 0.7376146788990826,\n \"acc_norm_stderr\": 0.018861885021534738\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4166666666666667,\n \"acc_stderr\": 0.03362277436608044,\n \"\ acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.03362277436608044\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7352941176470589,\n \"acc_stderr\": 0.030964517926923403,\n \"\ acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.030964517926923403\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.729957805907173,\n \"acc_stderr\": 0.028900721906293426,\n \ \ \"acc_norm\": 0.729957805907173,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n\ \ \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n\ \ \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n\ \ \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.743801652892562,\n \"acc_stderr\": 0.03984979653302872,\n \"acc_norm\"\ : 0.743801652892562,\n \"acc_norm_stderr\": 0.03984979653302872\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n\ \ \"acc_stderr\": 0.04236511258094632,\n \"acc_norm\": 0.7407407407407407,\n\ \ \"acc_norm_stderr\": 0.04236511258094632\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.036230899157241446,\n\ \ \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.036230899157241446\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n\ \ \"acc_stderr\": 0.04364226155841044,\n \"acc_norm\": 0.30357142857142855,\n\ \ \"acc_norm_stderr\": 0.04364226155841044\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n\ \ \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8076923076923077,\n\ \ \"acc_stderr\": 0.025819233256483717,\n \"acc_norm\": 
0.8076923076923077,\n\ \ \"acc_norm_stderr\": 0.025819233256483717\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \ \ \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7611749680715197,\n\ \ \"acc_stderr\": 0.015246803197398674,\n \"acc_norm\": 0.7611749680715197,\n\ \ \"acc_norm_stderr\": 0.015246803197398674\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6445086705202312,\n \"acc_stderr\": 0.025770292082977254,\n\ \ \"acc_norm\": 0.6445086705202312,\n \"acc_norm_stderr\": 0.025770292082977254\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27262569832402234,\n\ \ \"acc_stderr\": 0.014893391735249619,\n \"acc_norm\": 0.27262569832402234,\n\ \ \"acc_norm_stderr\": 0.014893391735249619\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6045751633986928,\n \"acc_stderr\": 0.02799672318063145,\n\ \ \"acc_norm\": 0.6045751633986928,\n \"acc_norm_stderr\": 0.02799672318063145\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6463022508038585,\n\ \ \"acc_stderr\": 0.027155208103200865,\n \"acc_norm\": 0.6463022508038585,\n\ \ \"acc_norm_stderr\": 0.027155208103200865\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6203703703703703,\n \"acc_stderr\": 0.02700252103451647,\n\ \ \"acc_norm\": 0.6203703703703703,\n \"acc_norm_stderr\": 0.02700252103451647\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4078014184397163,\n \"acc_stderr\": 0.029316011776343555,\n \ \ \"acc_norm\": 0.4078014184397163,\n \"acc_norm_stderr\": 0.029316011776343555\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4178617992177314,\n\ \ \"acc_stderr\": 0.012596744108998557,\n \"acc_norm\": 0.4178617992177314,\n\ \ \"acc_norm_stderr\": 0.012596744108998557\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4889705882352941,\n \"acc_stderr\": 0.03036544647727568,\n\ \ \"acc_norm\": 0.4889705882352941,\n \"acc_norm_stderr\": 0.03036544647727568\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.565359477124183,\n \"acc_stderr\": 0.02005426920072646,\n \ \ \"acc_norm\": 0.565359477124183,\n \"acc_norm_stderr\": 0.02005426920072646\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6090909090909091,\n\ \ \"acc_stderr\": 0.04673752333670239,\n \"acc_norm\": 0.6090909090909091,\n\ \ \"acc_norm_stderr\": 0.04673752333670239\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6122448979591837,\n \"acc_stderr\": 0.031192230726795656,\n\ \ \"acc_norm\": 0.6122448979591837,\n \"acc_norm_stderr\": 0.031192230726795656\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7512437810945274,\n\ \ \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.7512437810945274,\n\ \ \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4397590361445783,\n\ \ \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.4397590361445783,\n\ \ \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7602339181286549,\n \"acc_stderr\": 0.03274485211946956,\n\ \ \"acc_norm\": 
0.7602339181286549,\n \"acc_norm_stderr\": 0.03274485211946956\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2607099143206854,\n\ \ \"mc1_stderr\": 0.015368841620766372,\n \"mc2\": 0.3693612523342933,\n\ \ \"mc2_stderr\": 0.014364347604420232\n }\n}\n```" repo_url: https://huggingface.co/CHIH-HUNG/llama-2-13b-alpaca-test leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|arc:challenge|25_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hellaswag|10_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:49:48.067362.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:49:48.067362.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:49:48.067362.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:49:48.067362.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-29T20:49:48.067362.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-29T20:49:48.067362.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_29T20_49_48.067362 path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T20:49:48.067362.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-29T20:49:48.067362.parquet' - config_name: results data_files: - split: 2023_08_29T20_49_48.067362 path: - results_2023-08-29T20:49:48.067362.parquet - split: latest path: - results_2023-08-29T20:49:48.067362.parquet --- # Dataset Card for Evaluation run of CHIH-HUNG/llama-2-13b-alpaca-test ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/CHIH-HUNG/llama-2-13b-alpaca-test - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [CHIH-HUNG/llama-2-13b-alpaca-test](https://huggingface.co/CHIH-HUNG/llama-2-13b-alpaca-test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
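The aggregated scores themselves can be pulled directly from the `results` configuration declared in the YAML header above. The snippet below is a minimal sketch rather than part of the original card: it assumes only that the `datasets` library is installed, and it does not document the column layout of the results table, which is not described here.

```python
# Hypothetical usage sketch: load the aggregated "results" configuration.
# The configuration name and the "latest" split are both declared in this
# card's YAML header; the schema of the resulting table is not documented here.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-alpaca-test",
    "results",
    split="latest",
)
print(results)  # inspect the number of rows and available columns
```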
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-alpaca-test", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-29T20:49:48.067362](https://huggingface.co/datasets/open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-alpaca-test/blob/main/results_2023-08-29T20%3A49%3A48.067362.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5568910569830451, "acc_stderr": 0.03436225133323378, "acc_norm": 0.5610380147243772, "acc_norm_stderr": 0.034342335699213765, "mc1": 0.2607099143206854, "mc1_stderr": 0.015368841620766372, "mc2": 0.3693612523342933, "mc2_stderr": 0.014364347604420232 }, "harness|arc:challenge|25": { "acc": 0.5571672354948806, "acc_stderr": 0.014515573873348899, "acc_norm": 0.6006825938566553, "acc_norm_stderr": 0.014312094557946704 }, "harness|hellaswag|10": { "acc": 0.6117307309300936, "acc_stderr": 0.004863603638367449, "acc_norm": 0.8128858793069109, "acc_norm_stderr": 0.003892060546588329 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45925925925925926, "acc_stderr": 0.04304979692464242, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464242 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.040335656678483184, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.040335656678483184 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6150943396226415, "acc_stderr": 0.02994649856769995, "acc_norm": 0.6150943396226415, "acc_norm_stderr": 0.02994649856769995 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5625, "acc_stderr": 0.04148415739394154, "acc_norm": 0.5625, "acc_norm_stderr": 0.04148415739394154 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5260115606936416, "acc_stderr": 0.03807301726504511, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.03807301726504511 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.04389869956808778, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808778 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4340425531914894, "acc_stderr": 0.032400380867927465, "acc_norm": 0.4340425531914894, "acc_norm_stderr": 0.032400380867927465 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 
0.04462917535336936, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3306878306878307, "acc_stderr": 0.024229965298425072, "acc_norm": 0.3306878306878307, "acc_norm_stderr": 0.024229965298425072 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574924, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574924 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6580645161290323, "acc_stderr": 0.026985289576552742, "acc_norm": 0.6580645161290323, "acc_norm_stderr": 0.026985289576552742 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4630541871921182, "acc_stderr": 0.035083705204426656, "acc_norm": 0.4630541871921182, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03681050869161551, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03681050869161551 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.702020202020202, "acc_stderr": 0.03258630383836557, "acc_norm": 0.702020202020202, "acc_norm_stderr": 0.03258630383836557 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.02749350424454806, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.02749350424454806 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5153846153846153, "acc_stderr": 0.025339003010106522, "acc_norm": 0.5153846153846153, "acc_norm_stderr": 0.025339003010106522 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5714285714285714, "acc_stderr": 0.032145368597886394, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.032145368597886394 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7376146788990826, "acc_stderr": 0.018861885021534738, "acc_norm": 0.7376146788990826, "acc_norm_stderr": 0.018861885021534738 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4166666666666667, "acc_stderr": 0.03362277436608044, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.03362277436608044 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7352941176470589, "acc_stderr": 0.030964517926923403, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.030964517926923403 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.729957805907173, "acc_stderr": 0.028900721906293426, "acc_norm": 0.729957805907173, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, 
"acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.042438692422305246, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.042438692422305246 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.03984979653302872, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.03984979653302872 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094632, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094632 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.036230899157241446, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.036230899157241446 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8076923076923077, "acc_stderr": 0.025819233256483717, "acc_norm": 0.8076923076923077, "acc_norm_stderr": 0.025819233256483717 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7611749680715197, "acc_stderr": 0.015246803197398674, "acc_norm": 0.7611749680715197, "acc_norm_stderr": 0.015246803197398674 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6445086705202312, "acc_stderr": 0.025770292082977254, "acc_norm": 0.6445086705202312, "acc_norm_stderr": 0.025770292082977254 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249619, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249619 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6045751633986928, "acc_stderr": 0.02799672318063145, "acc_norm": 0.6045751633986928, "acc_norm_stderr": 0.02799672318063145 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6463022508038585, "acc_stderr": 0.027155208103200865, "acc_norm": 0.6463022508038585, "acc_norm_stderr": 0.027155208103200865 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6203703703703703, "acc_stderr": 0.02700252103451647, "acc_norm": 0.6203703703703703, "acc_norm_stderr": 0.02700252103451647 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4078014184397163, "acc_stderr": 0.029316011776343555, "acc_norm": 0.4078014184397163, "acc_norm_stderr": 0.029316011776343555 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4178617992177314, "acc_stderr": 0.012596744108998557, "acc_norm": 0.4178617992177314, "acc_norm_stderr": 0.012596744108998557 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4889705882352941, "acc_stderr": 0.03036544647727568, "acc_norm": 0.4889705882352941, "acc_norm_stderr": 0.03036544647727568 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.565359477124183, "acc_stderr": 0.02005426920072646, "acc_norm": 0.565359477124183, "acc_norm_stderr": 0.02005426920072646 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6090909090909091, "acc_stderr": 0.04673752333670239, "acc_norm": 0.6090909090909091, "acc_norm_stderr": 0.04673752333670239 }, "harness|hendrycksTest-security_studies|5": { 
"acc": 0.6122448979591837, "acc_stderr": 0.031192230726795656, "acc_norm": 0.6122448979591837, "acc_norm_stderr": 0.031192230726795656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7512437810945274, "acc_stderr": 0.030567675938916714, "acc_norm": 0.7512437810945274, "acc_norm_stderr": 0.030567675938916714 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699121, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.03274485211946956, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.03274485211946956 }, "harness|truthfulqa:mc|0": { "mc1": 0.2607099143206854, "mc1_stderr": 0.015368841620766372, "mc2": 0.3693612523342933, "mc2_stderr": 0.014364347604420232 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_fangloveskari__Dolphin_ORCA_LLaMA_70b_QLoRA
open-llm-leaderboard
2023-08-30T03:09:59Z
201
0
[ "region:us" ]
null
2023-08-30T03:09:01Z
--- pretty_name: Evaluation run of fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA](https://huggingface.co/fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fangloveskari__Dolphin_ORCA_LLaMA_70b_QLoRA\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-30T03:08:37.403827](https://huggingface.co/datasets/open-llm-leaderboard/details_fangloveskari__Dolphin_ORCA_LLaMA_70b_QLoRA/blob/main/results_2023-08-30T03%3A08%3A37.403827.json):\n\ \n```python\n{\n \"all\": {\n \"acc\": 0.7016950821019889,\n \"\ acc_stderr\": 0.03100773424505602,\n \"acc_norm\": 0.7055688798324372,\n\ \ \"acc_norm_stderr\": 0.030976198338743925,\n \"mc1\": 0.4528763769889841,\n\ \ \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 0.6337134354987094,\n\ \ \"mc2_stderr\": 0.014897273290786066\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6834470989761092,\n \"acc_stderr\": 0.01359243151906808,\n\ \ \"acc_norm\": 0.7226962457337884,\n \"acc_norm_stderr\": 0.013082095839059374\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6881099382593109,\n\ \ \"acc_stderr\": 0.004623184227344766,\n \"acc_norm\": 0.877414857598088,\n\ \ \"acc_norm_stderr\": 0.0032729014349397656\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n\ \ \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n\ \ \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.8026315789473685,\n \"acc_stderr\": 0.03238981601699397,\n\ \ \"acc_norm\": 0.8026315789473685,\n \"acc_norm_stderr\": 0.03238981601699397\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n\ \ \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7471698113207547,\n \"acc_stderr\": 0.026749899771241214,\n\ \ \"acc_norm\": 0.7471698113207547,\n \"acc_norm_stderr\": 0.026749899771241214\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8263888888888888,\n\ \ \"acc_stderr\": 0.031674733837957166,\n \"acc_norm\": 0.8263888888888888,\n\ \ \"acc_norm_stderr\": 0.031674733837957166\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \ \ 
\"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n\ \ \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n\ \ \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n\ \ \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n\ \ \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n\ \ \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.6851063829787234,\n \"acc_stderr\": 0.030363582197238167,\n\ \ \"acc_norm\": 0.6851063829787234,\n \"acc_norm_stderr\": 0.030363582197238167\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\ \ \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n\ \ \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6413793103448275,\n \"acc_stderr\": 0.03996629574876719,\n\ \ \"acc_norm\": 0.6413793103448275,\n \"acc_norm_stderr\": 0.03996629574876719\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.47883597883597884,\n \"acc_stderr\": 0.025728230952130723,\n \"\ acc_norm\": 0.47883597883597884,\n \"acc_norm_stderr\": 0.025728230952130723\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n\ \ \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \ \ \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \ \ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8096774193548387,\n\ \ \"acc_stderr\": 0.022331707611823074,\n \"acc_norm\": 0.8096774193548387,\n\ \ \"acc_norm_stderr\": 0.022331707611823074\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.541871921182266,\n \"acc_stderr\": 0.03505630140785741,\n\ \ \"acc_norm\": 0.541871921182266,\n \"acc_norm_stderr\": 0.03505630140785741\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\"\ : 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8484848484848485,\n \"acc_stderr\": 0.027998073798781678,\n\ \ \"acc_norm\": 0.8484848484848485,\n \"acc_norm_stderr\": 0.027998073798781678\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821677,\n \"\ acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821677\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9326424870466321,\n \"acc_stderr\": 0.0180883938390789,\n\ \ \"acc_norm\": 0.9326424870466321,\n \"acc_norm_stderr\": 
0.0180883938390789\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7025641025641025,\n \"acc_stderr\": 0.023177408131465946,\n\ \ \"acc_norm\": 0.7025641025641025,\n \"acc_norm_stderr\": 0.023177408131465946\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.34444444444444444,\n \"acc_stderr\": 0.028972648884844267,\n \ \ \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.028972648884844267\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.027553614467863814,\n\ \ \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.027553614467863814\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.4768211920529801,\n \"acc_stderr\": 0.04078093859163083,\n \"\ acc_norm\": 0.4768211920529801,\n \"acc_norm_stderr\": 0.04078093859163083\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.9027522935779817,\n \"acc_stderr\": 0.012703533408540366,\n \"\ acc_norm\": 0.9027522935779817,\n \"acc_norm_stderr\": 0.012703533408540366\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.5925925925925926,\n \"acc_stderr\": 0.033509916046960436,\n \"\ acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.033509916046960436\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.9264705882352942,\n \"acc_stderr\": 0.01831885585008968,\n \"\ acc_norm\": 0.9264705882352942,\n \"acc_norm_stderr\": 0.01831885585008968\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8945147679324894,\n \"acc_stderr\": 0.01999556072375854,\n \ \ \"acc_norm\": 0.8945147679324894,\n \"acc_norm_stderr\": 0.01999556072375854\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7757847533632287,\n\ \ \"acc_stderr\": 0.027991534258519517,\n \"acc_norm\": 0.7757847533632287,\n\ \ \"acc_norm_stderr\": 0.027991534258519517\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8396946564885496,\n \"acc_stderr\": 0.03217829420744632,\n\ \ \"acc_norm\": 0.8396946564885496,\n \"acc_norm_stderr\": 0.03217829420744632\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035196,\n \"\ acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035196\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n\ \ \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.8240740740740741,\n\ \ \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8220858895705522,\n \"acc_stderr\": 0.03004735765580663,\n\ \ \"acc_norm\": 0.8220858895705522,\n \"acc_norm_stderr\": 0.03004735765580663\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n\ \ \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n\ \ \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n\ \ \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9017094017094017,\n\ \ \"acc_stderr\": 0.019503444900757567,\n \"acc_norm\": 0.9017094017094017,\n\ \ \"acc_norm_stderr\": 0.019503444900757567\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.71,\n 
\"acc_stderr\": 0.04560480215720684,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8659003831417624,\n\ \ \"acc_stderr\": 0.012185528166499978,\n \"acc_norm\": 0.8659003831417624,\n\ \ \"acc_norm_stderr\": 0.012185528166499978\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7716763005780347,\n \"acc_stderr\": 0.022598703804321635,\n\ \ \"acc_norm\": 0.7716763005780347,\n \"acc_norm_stderr\": 0.022598703804321635\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5843575418994413,\n\ \ \"acc_stderr\": 0.016482782187500683,\n \"acc_norm\": 0.5843575418994413,\n\ \ \"acc_norm_stderr\": 0.016482782187500683\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7516339869281046,\n \"acc_stderr\": 0.02473998135511359,\n\ \ \"acc_norm\": 0.7516339869281046,\n \"acc_norm_stderr\": 0.02473998135511359\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7620578778135049,\n\ \ \"acc_stderr\": 0.024185150647818707,\n \"acc_norm\": 0.7620578778135049,\n\ \ \"acc_norm_stderr\": 0.024185150647818707\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.021038517770157382,\n\ \ \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.021038517770157382\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5673758865248227,\n \"acc_stderr\": 0.029555454236778838,\n \ \ \"acc_norm\": 0.5673758865248227,\n \"acc_norm_stderr\": 0.029555454236778838\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5710560625814863,\n\ \ \"acc_stderr\": 0.012640625443067365,\n \"acc_norm\": 0.5710560625814863,\n\ \ \"acc_norm_stderr\": 0.012640625443067365\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7205882352941176,\n \"acc_stderr\": 0.027257202606114948,\n\ \ \"acc_norm\": 0.7205882352941176,\n \"acc_norm_stderr\": 0.027257202606114948\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.761437908496732,\n \"acc_stderr\": 0.01724238582877962,\n \ \ \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.01724238582877962\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7363636363636363,\n\ \ \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.7363636363636363,\n\ \ \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02560737598657916,\n \ \ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02560737598657916\n },\n\ \ \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8905472636815921,\n\ \ \"acc_stderr\": 0.02207632610182466,\n \"acc_norm\": 0.8905472636815921,\n\ \ \"acc_norm_stderr\": 0.02207632610182466\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \ \ \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n\ \ \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.5301204819277109,\n\ \ \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.026168221344662297,\n\ \ \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.026168221344662297\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4528763769889841,\n\ \ \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 
0.6337134354987094,\n\ \ \"mc2_stderr\": 0.014897273290786066\n }\n}\n```" repo_url: https://huggingface.co/fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|arc:challenge|25_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hellaswag|10_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T03:08:37.403827.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T03:08:37.403827.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T03:08:37.403827.parquet' - 
'**/details_harness|hendrycksTest-professional_law|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T03:08:37.403827.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T03:08:37.403827.parquet' - config_name: 
harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T03:08:37.403827.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_30T03_08_37.403827 path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T03:08:37.403827.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T03:08:37.403827.parquet' - config_name: results data_files: - split: 2023_08_30T03_08_37.403827 path: - results_2023-08-30T03:08:37.403827.parquet - split: latest path: - results_2023-08-30T03:08:37.403827.parquet
---

# Dataset Card for Evaluation run of fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA](https://huggingface.co/fangloveskari/Dolphin_ORCA_LLaMA_70b_QLoRA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
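For example, here is a minimal sketch that relies only on the `results` configuration and the `latest` split declared in the configs above; the variable name `agg` is just illustrative:

```python
from datasets import load_dataset

# Aggregated metrics for this model; the "latest" split always points to the
# most recent evaluation run (here 2023-08-30T03:08:37.403827).
agg = load_dataset(
    "open-llm-leaderboard/details_fangloveskari__Dolphin_ORCA_LLaMA_70b_QLoRA",
    "results",
    split="latest",
)
print(agg[0])
```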
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_fangloveskari__Dolphin_ORCA_LLaMA_70b_QLoRA", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-30T03:08:37.403827](https://huggingface.co/datasets/open-llm-leaderboard/details_fangloveskari__Dolphin_ORCA_LLaMA_70b_QLoRA/blob/main/results_2023-08-30T03%3A08%3A37.403827.json): ```python { "all": { "acc": 0.7016950821019889, "acc_stderr": 0.03100773424505602, "acc_norm": 0.7055688798324372, "acc_norm_stderr": 0.030976198338743925, "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.6337134354987094, "mc2_stderr": 0.014897273290786066 }, "harness|arc:challenge|25": { "acc": 0.6834470989761092, "acc_stderr": 0.01359243151906808, "acc_norm": 0.7226962457337884, "acc_norm_stderr": 0.013082095839059374 }, "harness|hellaswag|10": { "acc": 0.6881099382593109, "acc_stderr": 0.004623184227344766, "acc_norm": 0.877414857598088, "acc_norm_stderr": 0.0032729014349397656 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8026315789473685, "acc_stderr": 0.03238981601699397, "acc_norm": 0.8026315789473685, "acc_norm_stderr": 0.03238981601699397 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7471698113207547, "acc_stderr": 0.026749899771241214, "acc_norm": 0.7471698113207547, "acc_norm_stderr": 0.026749899771241214 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8263888888888888, "acc_stderr": 0.031674733837957166, "acc_norm": 0.8263888888888888, "acc_norm_stderr": 0.031674733837957166 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6851063829787234, "acc_stderr": 0.030363582197238167, "acc_norm": 0.6851063829787234, "acc_norm_stderr": 0.030363582197238167 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": 
{ "acc": 0.6413793103448275, "acc_stderr": 0.03996629574876719, "acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.03996629574876719 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.47883597883597884, "acc_stderr": 0.025728230952130723, "acc_norm": 0.47883597883597884, "acc_norm_stderr": 0.025728230952130723 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8096774193548387, "acc_stderr": 0.022331707611823074, "acc_norm": 0.8096774193548387, "acc_norm_stderr": 0.022331707611823074 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.541871921182266, "acc_stderr": 0.03505630140785741, "acc_norm": 0.541871921182266, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8484848484848485, "acc_stderr": 0.027998073798781678, "acc_norm": 0.8484848484848485, "acc_norm_stderr": 0.027998073798781678 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02239078763821677, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02239078763821677 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9326424870466321, "acc_stderr": 0.0180883938390789, "acc_norm": 0.9326424870466321, "acc_norm_stderr": 0.0180883938390789 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7025641025641025, "acc_stderr": 0.023177408131465946, "acc_norm": 0.7025641025641025, "acc_norm_stderr": 0.023177408131465946 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7647058823529411, "acc_stderr": 0.027553614467863814, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.027553614467863814 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4768211920529801, "acc_stderr": 0.04078093859163083, "acc_norm": 0.4768211920529801, "acc_norm_stderr": 0.04078093859163083 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9027522935779817, "acc_stderr": 0.012703533408540366, "acc_norm": 0.9027522935779817, "acc_norm_stderr": 0.012703533408540366 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5925925925925926, "acc_stderr": 0.033509916046960436, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.033509916046960436 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9264705882352942, "acc_stderr": 0.01831885585008968, "acc_norm": 0.9264705882352942, "acc_norm_stderr": 0.01831885585008968 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8945147679324894, "acc_stderr": 0.01999556072375854, "acc_norm": 0.8945147679324894, "acc_norm_stderr": 0.01999556072375854 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7757847533632287, "acc_stderr": 0.027991534258519517, "acc_norm": 0.7757847533632287, "acc_norm_stderr": 0.027991534258519517 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8396946564885496, "acc_stderr": 
0.03217829420744632, "acc_norm": 0.8396946564885496, "acc_norm_stderr": 0.03217829420744632 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035196, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035196 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8220858895705522, "acc_stderr": 0.03004735765580663, "acc_norm": 0.8220858895705522, "acc_norm_stderr": 0.03004735765580663 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9017094017094017, "acc_stderr": 0.019503444900757567, "acc_norm": 0.9017094017094017, "acc_norm_stderr": 0.019503444900757567 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.04560480215720684, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8659003831417624, "acc_stderr": 0.012185528166499978, "acc_norm": 0.8659003831417624, "acc_norm_stderr": 0.012185528166499978 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7716763005780347, "acc_stderr": 0.022598703804321635, "acc_norm": 0.7716763005780347, "acc_norm_stderr": 0.022598703804321635 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5843575418994413, "acc_stderr": 0.016482782187500683, "acc_norm": 0.5843575418994413, "acc_norm_stderr": 0.016482782187500683 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7516339869281046, "acc_stderr": 0.02473998135511359, "acc_norm": 0.7516339869281046, "acc_norm_stderr": 0.02473998135511359 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7620578778135049, "acc_stderr": 0.024185150647818707, "acc_norm": 0.7620578778135049, "acc_norm_stderr": 0.024185150647818707 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.021038517770157382, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.021038517770157382 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5673758865248227, "acc_stderr": 0.029555454236778838, "acc_norm": 0.5673758865248227, "acc_norm_stderr": 0.029555454236778838 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5710560625814863, "acc_stderr": 0.012640625443067365, "acc_norm": 0.5710560625814863, "acc_norm_stderr": 0.012640625443067365 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7205882352941176, "acc_stderr": 0.027257202606114948, "acc_norm": 0.7205882352941176, "acc_norm_stderr": 0.027257202606114948 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.761437908496732, "acc_stderr": 0.01724238582877962, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.01724238582877962 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7363636363636363, "acc_stderr": 0.04220224692971987, "acc_norm": 0.7363636363636363, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.02560737598657916, "acc_norm": 0.8, "acc_norm_stderr": 0.02560737598657916 }, "harness|hendrycksTest-sociology|5": { "acc": 
0.8905472636815921, "acc_stderr": 0.02207632610182466, "acc_norm": 0.8905472636815921, "acc_norm_stderr": 0.02207632610182466 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866766, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.026168221344662297, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.026168221344662297 }, "harness|truthfulqa:mc|0": { "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.6337134354987094, "mc2_stderr": 0.014897273290786066 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
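Beyond the single snippet above, the per-task configurations and the aggregated "results" configuration listed in the YAML header can be read the same way. The sketch below is a minimal, untested example assuming the `datasets` library is installed and the repository is reachable; the config name `harness_hendrycksTest_world_religions_5`, the `results` config, and the `latest` split are taken directly from the configuration listing above (the card's own example uses `split="train"` instead, which it states also points at the latest run).

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_fangloveskari__Dolphin_ORCA_LLaMA_70b_QLoRA"

# List every available config (one per evaluated task, plus "results").
print(get_dataset_config_names(repo))

# Per-task details: the "latest" split points at the most recent run, while the
# timestamped split (e.g. "2023_08_30T03_08_37.403827") pins a specific run.
details = load_dataset(repo, "harness_hendrycksTest_world_religions_5", split="latest")
print(details.column_names)

# Aggregated metrics for the whole run live in the "results" config.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```

Pinning the timestamped split (its name mirrors the timestamp in the parquet file names) keeps an analysis reproducible even if later runs are appended to the repository.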
[ -0.745465099811554, -0.8323371410369873, 0.2673604488372803, 0.2266542911529541, -0.22025972604751587, -0.05621737986803055, 0.06783138960599899, -0.28118520975112915, 0.6259973645210266, -0.06927256286144257, -0.4975597560405731, -0.721308171749115, -0.4656465947628021, 0.24079588055610657, 0.005159721244126558, 0.7928494215011597, -0.13163098692893982, -0.15592215955257416, 0.10451871156692505, -0.061548128724098206, -0.276842325925827, -0.35607847571372986, -0.5728301405906677, -0.3437465727329254, 0.19995634257793427, 0.3826494812965393, 0.4939191937446594, 0.8473753333091736, 0.7024673819541931, 0.29233118891716003, -0.3753371834754944, 0.0159702617675066, -0.18476970493793488, -0.31628260016441345, 0.35570335388183594, -0.3536723554134369, -0.9046173691749573, 0.30390822887420654, 0.7621520757675171, 0.7003957033157349, -0.04442057013511658, 0.33412742614746094, 0.03855364769697189, 0.5971019864082336, -0.30973801016807556, 0.0921197161078453, -0.25664234161376953, 0.28098317980766296, -0.21817047894001007, -0.3641360104084015, -0.25431498885154724, -0.29694730043411255, -0.08704570680856705, -0.9304674863815308, 0.21274229884147644, 0.32597917318344116, 1.5823414325714111, -0.18153101205825806, -0.2703229486942291, 0.08568538725376129, -0.056116219609975815, 1.027642846107483, -0.8861393332481384, 0.35533711314201355, 0.769943356513977, 0.10362528264522552, -0.1940665990114212, -0.5536962747573853, -0.6567158102989197, 0.10201560705900192, -0.36509376764297485, 0.37101781368255615, -0.03009139746427536, -0.21116748452186584, 0.36075928807258606, 0.644314706325531, -0.6531857252120972, 0.14390258491039276, -0.6396472454071045, -0.13857322931289673, 1.0693840980529785, 0.3806482255458832, 0.08929529041051865, -0.3435097932815552, -0.6835734248161316, -0.6918660402297974, -0.3978104293346405, 0.2913566529750824, 0.4706530272960663, 0.3843550682067871, -0.41321080923080444, 0.7030120491981506, -0.39919060468673706, 0.5893356800079346, 0.41010650992393494, -0.014190653339028358, 0.9295392632484436, -0.6318954229354858, -0.5246883034706116, -0.05081528052687645, 1.1001890897750854, 0.6457714438438416, 0.0687069520354271, 0.23361051082611084, 0.06766275316476822, -0.1080603152513504, -0.010314827784895897, -0.8535952568054199, -0.26910242438316345, 0.20243066549301147, -0.45163458585739136, -0.5153879523277283, 0.35776379704475403, -0.9143937230110168, 0.11192332953214645, -0.0174111295491457, 0.42177894711494446, -0.471380352973938, -0.17866897583007812, 0.3293108344078064, -0.41721972823143005, 0.8549112677574158, -0.1857413500547409, -0.8021770715713501, 0.38228723406791687, 0.5060188174247742, 0.7742790579795837, -0.0859450176358223, -0.4184568226337433, -0.040782634168863297, -0.06227479502558708, -0.29194608330726624, 0.5754818916320801, -0.2649674713611603, -0.4899712800979614, -0.27154645323753357, 0.2960182726383209, -0.2653837502002716, -0.32665520906448364, 0.7302826642990112, -0.25357139110565186, 0.14773105084896088, -0.4629442095756531, -0.6332602500915527, 0.10039206594228745, 0.40994328260421753, -0.45740193128585815, 1.3024605512619019, 0.21201223134994507, -0.8943151235580444, 0.49055537581443787, -0.6207384467124939, -0.1767652928829193, -0.02788577601313591, -0.053065188229084015, -0.8283950686454773, -0.3195202946662903, 0.20012979209423065, 0.4003935158252716, -0.1853121668100357, -0.14407497644424438, -0.43225324153900146, -0.3479214012622833, 0.33562126755714417, -0.1538851410150528, 1.2842772006988525, -0.0408848412334919, -0.7382729649543762, 
-0.14019881188869476, -1.2730120420455933, 0.3312954008579254, 0.2638443112373352, -0.42978382110595703, -0.14239050447940826, -0.48286253213882446, -0.05982162430882454, 0.1576605588197708, 0.28857123851776123, -0.8200375437736511, 0.28204384446144104, -0.35799410939216614, 0.16235820949077606, 1.254432201385498, 0.05213361233472824, 0.14828744530677795, -0.5666863322257996, 0.5227765440940857, 0.21932582557201385, 0.20797744393348694, 0.4373055100440979, -0.6326436996459961, -0.7939043641090393, -0.4509879946708679, -0.0536382794380188, 0.5884535908699036, -0.20545074343681335, 1.152565360069275, 0.05884107947349548, -0.9213362336158752, -0.45021185278892517, -0.173353374004364, 0.41600504517555237, 0.8386726379394531, 0.5845174193382263, -0.06588766723871231, -0.6548604369163513, -1.0639108419418335, -0.2723561227321625, -0.17112557590007782, 0.16210664808750153, 0.285828173160553, 0.9911544919013977, -0.20053167641162872, 0.6558437347412109, -1.0407049655914307, -0.23085737228393555, 0.16908860206604004, -0.08827663213014603, 0.8323471546173096, 0.7850419282913208, 0.6486320495605469, -0.6900114417076111, -0.5248035192489624, 0.1773008108139038, -0.9858946800231934, -0.03658118471503258, 0.13367395102977753, -0.3433149456977844, 0.05282047763466835, 0.1358691155910492, -0.7099587321281433, 0.5804159045219421, 0.24546122550964355, -1.0901992321014404, 1.0345823764801025, -0.3650975823402405, 0.5736735463142395, -0.9748945236206055, 0.22999568283557892, -0.0832386389374733, 0.06299645453691483, -0.4765678942203522, 0.03664826229214668, 0.06359320878982544, 0.4349670708179474, -0.5128205418586731, 0.7901557683944702, -0.7244608998298645, -0.0698479488492012, 0.4397973418235779, 0.13321571052074432, -0.12985293567180634, 0.38052356243133545, -0.13995914161205292, 0.7893412113189697, 0.8212121725082397, -0.5109960436820984, 0.5048791766166687, 0.41920241713523865, -0.22058990597724915, 0.7721592783927917, -0.5071773529052734, -0.25813528895378113, 0.22150790691375732, -0.026337482035160065, -0.8490481376647949, -0.45692238211631775, 0.05754194036126137, -0.6178805232048035, -0.08814691007137299, 0.3915235102176666, -0.22892214357852936, -0.847606360912323, -0.9709702134132385, 0.3803752362728119, 0.6409379243850708, -0.4878598749637604, -0.13151423633098602, 0.09094760566949844, 0.11541932821273804, -0.8342810869216919, -0.8357675671577454, -0.46713051199913025, -0.2301887422800064, -0.7180441617965698, 0.29691725969314575, -0.28997525572776794, -0.2057373821735382, -0.0852329283952713, -0.21260453760623932, -0.29158926010131836, 0.025411032140254974, 0.13954900205135345, 0.6804788112640381, -0.4600907564163208, -0.3084251880645752, -0.24317671358585358, -0.1836431473493576, 0.22997474670410156, -0.15514688193798065, 0.4149419665336609, -0.44554397463798523, -0.405038982629776, -0.4527760148048401, 0.0023098071105778217, 0.6798739433288574, -0.07189320772886276, 0.7901268601417542, 0.43971681594848633, -0.29581722617149353, 0.039220668375492096, -0.2718310058116913, -0.2671602964401245, -0.5873437523841858, 0.25816455483436584, -0.5292123556137085, -1.0861486196517944, 0.8106003403663635, 0.5141783952713013, 0.07251758873462677, 1.164607048034668, 0.5723920464515686, -0.3002936542034149, 1.062850832939148, 0.0708957314491272, 0.29698649048805237, 0.3580568730831146, -0.7133371829986572, 0.09088049083948135, -0.9283122420310974, -0.3772987723350525, -0.5880665183067322, -0.4778670072555542, -0.7035893201828003, -0.051942527294158936, 0.2961631715297699, 0.12921863794326782, 
-0.7180899381637573, 0.5700781345367432, -0.8969034552574158, 0.5809189081192017, 0.5682904720306396, 0.32840532064437866, 0.15599124133586884, -0.1620728224515915, -0.38815340399742126, -0.13191784918308258, -0.4724169075489044, -0.2505628168582916, 1.2749594449996948, 0.3007981777191162, 0.7139368653297424, 0.11747680604457855, 0.8708643317222595, 0.1089610904455185, -0.06830104440450668, -0.5716581344604492, 0.647820770740509, 0.16116821765899658, -0.8492403030395508, -0.4349343478679657, -0.5160950422286987, -1.1092318296432495, 0.4349379539489746, -0.18477553129196167, -0.8801411390304565, 0.1674368530511856, 0.011526434682309628, -0.23000140488147736, 0.5277956128120422, -0.4762156903743744, 0.8140899538993835, -0.12188097834587097, -0.45481979846954346, 0.16499033570289612, -0.8298642635345459, 0.44936272501945496, 0.1851368546485901, 0.25646838545799255, 0.04213663935661316, 0.2400953322649002, 1.2213762998580933, -0.8476096987724304, 0.46737420558929443, 0.04789135605096817, 0.027201777324080467, 0.3755556344985962, -0.17048956453800201, 0.5149317383766174, 0.11329952627420425, -0.051269058138132095, -0.10020094364881516, 0.2833040654659271, -0.866858184337616, -0.010555067099630833, 0.843475878238678, -1.0029581785202026, -0.605977475643158, -0.9251443147659302, -0.49024108052253723, 0.040110696107149124, 0.5857800245285034, 0.3867921829223633, 0.49548235535621643, 0.01665686070919037, 0.4446287155151367, 0.8729044795036316, -0.1680637001991272, 0.5750382542610168, 0.26219379901885986, 0.13350172340869904, -0.6904463171958923, 0.851036548614502, 0.06771541386842728, 0.3290000557899475, 0.2583722174167633, 0.40650349855422974, -0.5921168327331543, -0.25604456663131714, -0.21849462389945984, 0.5364930033683777, -0.6551780700683594, -0.30871182680130005, -0.4233076274394989, -0.38313785195350647, -0.7891185879707336, -0.6369013786315918, -0.27999356389045715, -0.46980753540992737, -0.5372524261474609, -0.5170792937278748, 0.6466814279556274, 0.46268245577812195, -0.4185131788253784, 0.06477700173854828, -0.5047652125358582, 0.2516072988510132, 0.38027796149253845, 0.5517756342887878, -0.30872809886932373, -0.6103227734565735, 0.10503032803535461, -0.13154982030391693, -0.5160601735115051, -0.9573525190353394, 0.32529813051223755, -0.0327988862991333, 0.4905586540699005, 0.5999467968940735, 0.0137697234749794, 0.8924555778503418, -0.16310329735279083, 1.0431053638458252, 0.36439740657806396, -0.7802836298942566, 0.7306118011474609, -0.37975314259529114, 0.1582787036895752, 0.6747271418571472, 0.1895420104265213, -0.22718654572963715, -0.6519615054130554, -1.279712200164795, -0.8036426901817322, 0.7026938796043396, 0.347184956073761, -0.23093876242637634, 0.024284709244966507, 0.17804881930351257, -0.3178512752056122, -0.1824108064174652, -0.6954675316810608, -0.9169778227806091, -0.16640354692935944, -0.45162734389305115, 0.1209990605711937, 0.02226412110030651, -0.34181803464889526, -0.8325461745262146, 0.9152507781982422, 0.0028716465458273888, 0.5623412132263184, 0.47217240929603577, 0.11184345930814743, 0.05609693378210068, 0.4084601402282715, 0.9386418461799622, 0.7056242823600769, -0.49498850107192993, 0.38217154145240784, 0.4081430435180664, -1.1072138547897339, 0.47282809019088745, 0.268184632062912, -0.08813676983118057, -0.06486859917640686, 0.46535563468933105, 0.3818468451499939, 0.03014688938856125, -0.1749950498342514, 0.6119349598884583, -0.0314766950905323, -0.5723828077316284, -0.3924108147621155, 0.039987023919820786, -0.07590418308973312, -0.01783822476863861, 
0.38818037509918213, -0.17449383437633514, -0.04340032860636711, -0.5297712683677673, 0.4524962306022644, 0.3516593277454376, -0.49160709977149963, -0.14510923624038696, 0.7879929542541504, -0.21918851137161255, -0.0990459993481636, 0.31719616055488586, -0.19271589815616608, -0.6126962304115295, 1.1619218587875366, 0.6314785480499268, 0.6456435322761536, -0.27620747685432434, -0.08068423718214035, 0.9753072261810303, 0.3720044195652008, -0.03962557017803192, 0.5973154902458191, 0.32186660170555115, -0.2920379936695099, 0.24235974252223969, -0.9074912071228027, -0.055604469031095505, 0.1401631087064743, -0.8267560601234436, 0.2577025890350342, -0.5945428013801575, -0.2765287160873413, 0.027100136503577232, 0.4210231602191925, -0.4823130965232849, 0.5725902318954468, -0.3969813287258148, 1.2457807064056396, -1.0016345977783203, 0.7087434530258179, 0.7709864974021912, -0.5879309773445129, -1.121201515197754, -0.6031524538993835, 0.001915962086059153, -0.8324129581451416, 0.6230031251907349, -0.1104365885257721, 0.15687614679336548, -0.06430429220199585, -0.7347837090492249, -0.9585567712783813, 1.4108085632324219, -0.07310448586940765, -0.37086454033851624, 0.30447056889533997, 0.008834803476929665, 0.4596325159072876, 0.10696093738079071, 0.6403544545173645, 0.7902160882949829, 0.8363001346588135, -0.034051571041345596, -0.7428340911865234, 0.347525030374527, -0.49793824553489685, -0.3288347125053406, 0.4111759066581726, -0.9759907722473145, 1.255624771118164, 0.018960028886795044, 0.20057238638401031, -0.1915193647146225, 0.7116702795028687, 0.8179203867912292, 0.2517605721950531, 0.38947826623916626, 0.8998622894287109, 0.8731063604354858, -0.4698898196220398, 0.9638833403587341, -0.18131548166275024, 0.8698569536209106, 0.6645517945289612, 0.1982475072145462, 0.7823348641395569, 0.6390700340270996, -0.5724472999572754, 0.5203233361244202, 0.8426097631454468, -0.29756706953048706, 0.3796488046646118, 0.2669098675251007, -0.1285652071237564, -0.12793338298797607, 0.3967136740684509, -0.9022799730300903, 0.17194680869579315, 0.10119833797216415, -0.3435879051685333, 0.07996866106987, -0.47240903973579407, 0.32660505175590515, -0.0779956728219986, -0.06816060841083527, 0.37619122862815857, 0.04448694735765457, -0.3932269811630249, 0.9529423117637634, -0.17869485914707184, 0.8187914490699768, -0.5040205121040344, -0.07817219942808151, -0.3815400302410126, 0.5707834959030151, -0.47111520171165466, -1.0899659395217896, 0.1127576008439064, 0.05727964639663696, -0.10926652699708939, -0.12062952667474747, 0.6871136426925659, -0.17997129261493683, -0.7797017097473145, 0.13485059142112732, 0.03586887568235397, 0.13861878216266632, 0.54489666223526, -0.7415898442268372, -0.30418160557746887, -0.04684744030237198, -0.5541632771492004, 0.11872539669275284, 0.3126909136772156, 0.3067707419395447, 0.5758745074272156, 0.6649842858314514, 0.2305721789598465, 0.41865888237953186, -0.522429347038269, 0.8156163692474365, -1.0786652565002441, -0.7295156121253967, -0.9014664888381958, 0.3950818181037903, -0.35794955492019653, -0.8723640441894531, 1.0007168054580688, 1.0532126426696777, 0.8560799360275269, -0.004239986650645733, 0.6571317315101624, -0.3782538175582886, 0.30034929513931274, -0.3867584466934204, 1.0169506072998047, -0.8688511252403259, -0.2666565477848053, -0.2783513069152832, -0.7334742546081543, -0.4165498614311218, 0.8771166801452637, -0.16919802129268646, 0.08557981997728348, 1.0399609804153442, 0.6697463393211365, -0.10664661973714828, 0.07642146199941635, -0.06824851781129837, 
0.5383878350257874, 0.40573641657829285, 1.0149905681610107, 0.6362202763557434, -0.809806764125824, 0.3567211925983429, -0.4956982433795929, -0.42244789004325867, -0.38127025961875916, -0.5301939249038696, -0.8940789103507996, -0.4357186257839203, -0.21983128786087036, -0.6058045029640198, -0.16980727016925812, 0.9961046576499939, 0.46495458483695984, -0.9200119972229004, -0.4540901780128479, -0.060527753084897995, 0.09965768456459045, -0.5843868851661682, -0.4128149747848511, 0.8318873047828674, -0.05025968328118324, -0.536142110824585, 0.16284172236919403, -0.10246126353740692, 0.2782973051071167, 0.11815842241048813, -0.3797440230846405, -0.723965048789978, 0.025452138856053352, 0.4539777338504791, 0.42482471466064453, -0.6366287469863892, -0.7484602928161621, 0.2667293846607208, -0.534619927406311, 0.43613526225090027, -0.06070124730467796, -0.5219241380691528, 0.07907750457525253, 0.6871089935302734, 0.4762258231639862, 0.7020971179008484, -0.0015158661408349872, 0.020545566454529762, -0.6128934621810913, 0.25364160537719727, 0.0005849840817973018, 0.27296367287635803, -0.053755052387714386, -0.3217647969722748, 0.7833689451217651, 0.6722742915153503, -0.4903208017349243, -1.0598797798156738, -0.49726513028144836, -1.4950666427612305, 0.00322377379052341, 1.0698579549789429, -0.014088251627981663, -0.5724036693572998, 0.2073700875043869, -0.1385217010974884, 0.1654471755027771, -0.3194260895252228, 0.7617968320846558, 0.7341344356536865, -0.3543005585670471, 0.15967155992984772, -0.6210428476333618, 0.3878230154514313, 0.5179341435432434, -1.1995500326156616, -0.12008257210254669, 0.16112582385540009, 0.31976017355918884, 0.35255685448646545, 0.6821335554122925, -0.1395363211631775, 0.2699717879295349, 0.28843411803245544, 0.046619296073913574, 0.006991691421717405, 0.11158375442028046, -0.19584274291992188, 0.0836803987622261, -0.21880370378494263, -0.4727568030357361 ]
open-llm-leaderboard/details_quantumaikr__QuantumLM-llama2-70B-Korean-LoRA
open-llm-leaderboard
2023-08-30T07:54:46Z
201
0
[ "region:us" ]
null
2023-08-30T07:53:48Z
--- pretty_name: Evaluation run of quantumaikr/QuantumLM-llama2-70B-Korean-LoRA dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [quantumaikr/QuantumLM-llama2-70B-Korean-LoRA](https://huggingface.co/quantumaikr/QuantumLM-llama2-70B-Korean-LoRA)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_quantumaikr__QuantumLM-llama2-70B-Korean-LoRA\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-30T07:53:24.183560](https://huggingface.co/datasets/open-llm-leaderboard/details_quantumaikr__QuantumLM-llama2-70B-Korean-LoRA/blob/main/results_2023-08-30T07%3A53%3A24.183560.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6934168799483462,\n\ \ \"acc_stderr\": 0.03115919348812645,\n \"acc_norm\": 0.6971494359890498,\n\ \ \"acc_norm_stderr\": 0.031131669600877022,\n \"mc1\": 0.401468788249694,\n\ \ \"mc1_stderr\": 0.017160273901693654,\n \"mc2\": 0.5608488880093394,\n\ \ \"mc2_stderr\": 0.014874770245335572\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6749146757679181,\n \"acc_stderr\": 0.013688147309729119,\n\ \ \"acc_norm\": 0.7056313993174061,\n \"acc_norm_stderr\": 0.013318528460539422\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6743676558454491,\n\ \ \"acc_stderr\": 0.004676529200753001,\n \"acc_norm\": 0.8638717386974706,\n\ \ \"acc_norm_stderr\": 0.0034222387022263645\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n\ \ \"acc_stderr\": 0.04244633238353228,\n \"acc_norm\": 0.5925925925925926,\n\ \ \"acc_norm_stderr\": 0.04244633238353228\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.03064360707167709,\n\ \ \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.03064360707167709\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n\ \ \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724057,\n\ \ \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724057\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8333333333333334,\n\ \ \"acc_stderr\": 0.031164899666948617,\n 
\"acc_norm\": 0.8333333333333334,\n\ \ \"acc_norm_stderr\": 0.031164899666948617\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \ \ \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n\ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n\ \ \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n\ \ \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082635,\n\ \ \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082635\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.73,\n\ \ \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.6808510638297872,\n \"acc_stderr\": 0.030472973363380045,\n\ \ \"acc_norm\": 0.6808510638297872,\n \"acc_norm_stderr\": 0.030472973363380045\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n\ \ \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n\ \ \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n\ \ \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.43915343915343913,\n \"acc_stderr\": 0.02555992055053101,\n \"\ acc_norm\": 0.43915343915343913,\n \"acc_norm_stderr\": 0.02555992055053101\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n\ \ \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n\ \ \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8451612903225807,\n\ \ \"acc_stderr\": 0.020579287326583227,\n \"acc_norm\": 0.8451612903225807,\n\ \ \"acc_norm_stderr\": 0.020579287326583227\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5320197044334976,\n \"acc_stderr\": 0.03510766597959217,\n\ \ \"acc_norm\": 0.5320197044334976,\n \"acc_norm_stderr\": 0.03510766597959217\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\"\ : 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8242424242424242,\n \"acc_stderr\": 0.02972094300622445,\n\ \ \"acc_norm\": 0.8242424242424242,\n \"acc_norm_stderr\": 0.02972094300622445\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"\ acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 
0.024450155973189835\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9430051813471503,\n \"acc_stderr\": 0.01673108529360756,\n\ \ \"acc_norm\": 0.9430051813471503,\n \"acc_norm_stderr\": 0.01673108529360756\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7256410256410256,\n \"acc_stderr\": 0.022622765767493225,\n\ \ \"acc_norm\": 0.7256410256410256,\n \"acc_norm_stderr\": 0.022622765767493225\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083015,\n \ \ \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083015\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7857142857142857,\n \"acc_stderr\": 0.026653531596715484,\n\ \ \"acc_norm\": 0.7857142857142857,\n \"acc_norm_stderr\": 0.026653531596715484\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.45695364238410596,\n \"acc_stderr\": 0.04067325174247443,\n \"\ acc_norm\": 0.45695364238410596,\n \"acc_norm_stderr\": 0.04067325174247443\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8844036697247707,\n \"acc_stderr\": 0.013708749534172636,\n \"\ acc_norm\": 0.8844036697247707,\n \"acc_norm_stderr\": 0.013708749534172636\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.5879629629629629,\n \"acc_stderr\": 0.03356787758160831,\n \"\ acc_norm\": 0.5879629629629629,\n \"acc_norm_stderr\": 0.03356787758160831\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.9068627450980392,\n \"acc_stderr\": 0.020397853969427,\n \"acc_norm\"\ : 0.9068627450980392,\n \"acc_norm_stderr\": 0.020397853969427\n },\n\ \ \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\":\ \ 0.8734177215189873,\n \"acc_stderr\": 0.02164419572795517,\n \"\ acc_norm\": 0.8734177215189873,\n \"acc_norm_stderr\": 0.02164419572795517\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7757847533632287,\n\ \ \"acc_stderr\": 0.027991534258519517,\n \"acc_norm\": 0.7757847533632287,\n\ \ \"acc_norm_stderr\": 0.027991534258519517\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8320610687022901,\n \"acc_stderr\": 0.032785485373431386,\n\ \ \"acc_norm\": 0.8320610687022901,\n \"acc_norm_stderr\": 0.032785485373431386\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8842975206611571,\n \"acc_stderr\": 0.029199802455622814,\n \"\ acc_norm\": 0.8842975206611571,\n \"acc_norm_stderr\": 0.029199802455622814\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8518518518518519,\n\ \ \"acc_stderr\": 0.03434300243631,\n \"acc_norm\": 0.8518518518518519,\n\ \ \"acc_norm_stderr\": 0.03434300243631\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.030446777687971726,\n\ \ \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.030446777687971726\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5267857142857143,\n\ \ \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.5267857142857143,\n\ \ \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.0398913985953177,\n\ \ \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.0398913985953177\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 
0.8717948717948718,\n\ \ \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n\ \ \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8531289910600255,\n\ \ \"acc_stderr\": 0.012658201736147278,\n \"acc_norm\": 0.8531289910600255,\n\ \ \"acc_norm_stderr\": 0.012658201736147278\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7687861271676301,\n \"acc_stderr\": 0.022698657167855713,\n\ \ \"acc_norm\": 0.7687861271676301,\n \"acc_norm_stderr\": 0.022698657167855713\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5094972067039106,\n\ \ \"acc_stderr\": 0.01671948464334877,\n \"acc_norm\": 0.5094972067039106,\n\ \ \"acc_norm_stderr\": 0.01671948464334877\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7679738562091504,\n \"acc_stderr\": 0.024170840879340873,\n\ \ \"acc_norm\": 0.7679738562091504,\n \"acc_norm_stderr\": 0.024170840879340873\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7588424437299035,\n\ \ \"acc_stderr\": 0.024296594034763426,\n \"acc_norm\": 0.7588424437299035,\n\ \ \"acc_norm_stderr\": 0.024296594034763426\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.022021366100220194,\n\ \ \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.022021366100220194\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5567375886524822,\n \"acc_stderr\": 0.02963483847376601,\n \ \ \"acc_norm\": 0.5567375886524822,\n \"acc_norm_stderr\": 0.02963483847376601\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5560625814863103,\n\ \ \"acc_stderr\": 0.012689708167787677,\n \"acc_norm\": 0.5560625814863103,\n\ \ \"acc_norm_stderr\": 0.012689708167787677\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7536764705882353,\n \"acc_stderr\": 0.02617343857052,\n\ \ \"acc_norm\": 0.7536764705882353,\n \"acc_norm_stderr\": 0.02617343857052\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.7630718954248366,\n \"acc_stderr\": 0.01720166216978977,\n \ \ \"acc_norm\": 0.7630718954248366,\n \"acc_norm_stderr\": 0.01720166216978977\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n\ \ \"acc_stderr\": 0.04309118709946458,\n \"acc_norm\": 0.7181818181818181,\n\ \ \"acc_norm_stderr\": 0.04309118709946458\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7755102040816326,\n \"acc_stderr\": 0.0267114305555384,\n\ \ \"acc_norm\": 0.7755102040816326,\n \"acc_norm_stderr\": 0.0267114305555384\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8955223880597015,\n\ \ \"acc_stderr\": 0.021628920516700643,\n \"acc_norm\": 0.8955223880597015,\n\ \ \"acc_norm_stderr\": 0.021628920516700643\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.93,\n \"acc_stderr\": 0.0256432399976243,\n \ \ \"acc_norm\": 0.93,\n \"acc_norm_stderr\": 0.0256432399976243\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n\ \ \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n\ \ \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8538011695906432,\n 
\"acc_stderr\": 0.02709729011807082,\n\ \ \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.02709729011807082\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.401468788249694,\n\ \ \"mc1_stderr\": 0.017160273901693654,\n \"mc2\": 0.5608488880093394,\n\ \ \"mc2_stderr\": 0.014874770245335572\n }\n}\n```" repo_url: https://huggingface.co/quantumaikr/QuantumLM-llama2-70B-Korean-LoRA leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|arc:challenge|25_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hellaswag|10_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T07:53:24.183560.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T07:53:24.183560.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T07:53:24.183560.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T07:53:24.183560.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T07:53:24.183560.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T07:53:24.183560.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_30T07_53_24.183560 path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T07:53:24.183560.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T07:53:24.183560.parquet' - config_name: results data_files: - split: 2023_08_30T07_53_24.183560 path: - results_2023-08-30T07:53:24.183560.parquet - split: latest path: - results_2023-08-30T07:53:24.183560.parquet --- # Dataset Card for Evaluation run of quantumaikr/QuantumLM-llama2-70B-Korean-LoRA ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/quantumaikr/QuantumLM-llama2-70B-Korean-LoRA - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [quantumaikr/QuantumLM-llama2-70B-Korean-LoRA](https://huggingface.co/quantumaikr/QuantumLM-llama2-70B-Korean-LoRA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_quantumaikr__QuantumLM-llama2-70B-Korean-LoRA", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-30T07:53:24.183560](https://huggingface.co/datasets/open-llm-leaderboard/details_quantumaikr__QuantumLM-llama2-70B-Korean-LoRA/blob/main/results_2023-08-30T07%3A53%3A24.183560.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6934168799483462, "acc_stderr": 0.03115919348812645, "acc_norm": 0.6971494359890498, "acc_norm_stderr": 0.031131669600877022, "mc1": 0.401468788249694, "mc1_stderr": 0.017160273901693654, "mc2": 0.5608488880093394, "mc2_stderr": 0.014874770245335572 }, "harness|arc:challenge|25": { "acc": 0.6749146757679181, "acc_stderr": 0.013688147309729119, "acc_norm": 0.7056313993174061, "acc_norm_stderr": 0.013318528460539422 }, "harness|hellaswag|10": { "acc": 0.6743676558454491, "acc_stderr": 0.004676529200753001, "acc_norm": 0.8638717386974706, "acc_norm_stderr": 0.0034222387022263645 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04244633238353228, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.04244633238353228 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8289473684210527, "acc_stderr": 0.03064360707167709, "acc_norm": 0.8289473684210527, "acc_norm_stderr": 0.03064360707167709 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724057, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8333333333333334, "acc_stderr": 0.031164899666948617, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.031164899666948617 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082635, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082635 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.04461960433384739, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6808510638297872, "acc_stderr": 0.030472973363380045, "acc_norm": 0.6808510638297872, "acc_norm_stderr": 0.030472973363380045 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43915343915343913, "acc_stderr": 0.02555992055053101, "acc_norm": 0.43915343915343913, "acc_norm_stderr": 0.02555992055053101 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8451612903225807, "acc_stderr": 0.020579287326583227, "acc_norm": 0.8451612903225807, "acc_norm_stderr": 0.020579287326583227 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5320197044334976, "acc_stderr": 0.03510766597959217, "acc_norm": 0.5320197044334976, "acc_norm_stderr": 0.03510766597959217 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8242424242424242, "acc_stderr": 0.02972094300622445, "acc_norm": 0.8242424242424242, "acc_norm_stderr": 0.02972094300622445 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.01673108529360756, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.01673108529360756 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7256410256410256, "acc_stderr": 0.022622765767493225, "acc_norm": 0.7256410256410256, "acc_norm_stderr": 0.022622765767493225 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083015, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083015 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7857142857142857, "acc_stderr": 0.026653531596715484, "acc_norm": 0.7857142857142857, "acc_norm_stderr": 0.026653531596715484 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.45695364238410596, "acc_stderr": 0.04067325174247443, "acc_norm": 0.45695364238410596, "acc_norm_stderr": 0.04067325174247443 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8844036697247707, "acc_stderr": 0.013708749534172636, "acc_norm": 0.8844036697247707, "acc_norm_stderr": 0.013708749534172636 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5879629629629629, "acc_stderr": 0.03356787758160831, "acc_norm": 0.5879629629629629, "acc_norm_stderr": 0.03356787758160831 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9068627450980392, "acc_stderr": 0.020397853969427, "acc_norm": 0.9068627450980392, "acc_norm_stderr": 0.020397853969427 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8734177215189873, "acc_stderr": 0.02164419572795517, "acc_norm": 0.8734177215189873, "acc_norm_stderr": 0.02164419572795517 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7757847533632287, 
"acc_stderr": 0.027991534258519517, "acc_norm": 0.7757847533632287, "acc_norm_stderr": 0.027991534258519517 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8320610687022901, "acc_stderr": 0.032785485373431386, "acc_norm": 0.8320610687022901, "acc_norm_stderr": 0.032785485373431386 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8842975206611571, "acc_stderr": 0.029199802455622814, "acc_norm": 0.8842975206611571, "acc_norm_stderr": 0.029199802455622814 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8518518518518519, "acc_stderr": 0.03434300243631, "acc_norm": 0.8518518518518519, "acc_norm_stderr": 0.03434300243631 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.030446777687971726, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.030446777687971726 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5267857142857143, "acc_stderr": 0.047389751192741546, "acc_norm": 0.5267857142857143, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.0398913985953177, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.0398913985953177 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8531289910600255, "acc_stderr": 0.012658201736147278, "acc_norm": 0.8531289910600255, "acc_norm_stderr": 0.012658201736147278 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7687861271676301, "acc_stderr": 0.022698657167855713, "acc_norm": 0.7687861271676301, "acc_norm_stderr": 0.022698657167855713 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5094972067039106, "acc_stderr": 0.01671948464334877, "acc_norm": 0.5094972067039106, "acc_norm_stderr": 0.01671948464334877 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7679738562091504, "acc_stderr": 0.024170840879340873, "acc_norm": 0.7679738562091504, "acc_norm_stderr": 0.024170840879340873 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7588424437299035, "acc_stderr": 0.024296594034763426, "acc_norm": 0.7588424437299035, "acc_norm_stderr": 0.024296594034763426 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8055555555555556, "acc_stderr": 0.022021366100220194, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.022021366100220194 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5567375886524822, "acc_stderr": 0.02963483847376601, "acc_norm": 0.5567375886524822, "acc_norm_stderr": 0.02963483847376601 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5560625814863103, "acc_stderr": 0.012689708167787677, "acc_norm": 0.5560625814863103, "acc_norm_stderr": 0.012689708167787677 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7536764705882353, "acc_stderr": 0.02617343857052, "acc_norm": 0.7536764705882353, "acc_norm_stderr": 0.02617343857052 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7630718954248366, "acc_stderr": 0.01720166216978977, "acc_norm": 0.7630718954248366, "acc_norm_stderr": 0.01720166216978977 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.04309118709946458, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.04309118709946458 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.7755102040816326, "acc_stderr": 0.0267114305555384, "acc_norm": 0.7755102040816326, "acc_norm_stderr": 0.0267114305555384 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8955223880597015, "acc_stderr": 0.021628920516700643, "acc_norm": 0.8955223880597015, "acc_norm_stderr": 0.021628920516700643 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.93, "acc_stderr": 0.0256432399976243, "acc_norm": 0.93, "acc_norm_stderr": 0.0256432399976243 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8538011695906432, "acc_stderr": 0.02709729011807082, "acc_norm": 0.8538011695906432, "acc_norm_stderr": 0.02709729011807082 }, "harness|truthfulqa:mc|0": { "mc1": 0.401468788249694, "mc1_stderr": 0.017160273901693654, "mc2": 0.5608488880093394, "mc2_stderr": 0.014874770245335572 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.686315655708313, -0.8916735649108887, 0.28474393486976624, 0.16493888199329376, -0.22576747834682465, -0.014783203601837158, 0.05304227024316788, -0.22245429456233978, 0.5722194314002991, -0.03648696839809418, -0.4523625671863556, -0.6907521486282349, -0.3950772285461426, 0.21368034183979034, -0.03563665598630905, 0.8179017901420593, -0.19810886681079865, -0.11248014122247696, 0.10722587257623672, -0.03501508757472038, -0.23863478004932404, -0.3176230490207672, -0.4964500963687897, -0.35211795568466187, 0.16320879757404327, 0.40505778789520264, 0.4714978337287903, 0.7414132952690125, 0.6725035309791565, 0.290688157081604, -0.3347889184951782, -0.006564943585544825, -0.1681644320487976, -0.3018782138824463, 0.40477439761161804, -0.3677055239677429, -0.8716390132904053, 0.2584154009819031, 0.8272916674613953, 0.6111117601394653, -0.07074383646249771, 0.32937687635421753, 0.04181376472115517, 0.6351043581962585, -0.3732209801673889, -0.004679336212575436, -0.25959840416908264, 0.2909584045410156, -0.19886080920696259, -0.281451016664505, -0.29774147272109985, -0.24002307653427124, -0.13635657727718353, -0.888449490070343, 0.23700164258480072, 0.3227250277996063, 1.5486116409301758, -0.14349354803562164, -0.3195766806602478, 0.1481977254152298, -0.14543397724628448, 1.0291047096252441, -0.8928480744361877, 0.4292023777961731, 0.7666123509407043, 0.130309596657753, -0.18809157609939575, -0.5432852506637573, -0.6466926336288452, 0.06047090142965317, -0.3632453680038452, 0.3639128506183624, -0.08213546127080917, -0.1970854550600052, 0.3776313066482544, 0.6624805331230164, -0.6985859870910645, 0.1581474244594574, -0.6711375117301941, -0.16340690851211548, 1.0773591995239258, 0.35632967948913574, 0.040375083684921265, -0.398679256439209, -0.6568387746810913, -0.6478212475776672, -0.4292115569114685, 0.2570934593677521, 0.4375586211681366, 0.3241592347621918, -0.4055481255054474, 0.7302379012107849, -0.47473907470703125, 0.5584232807159424, 0.42089295387268066, -0.010223502293229103, 0.8869451880455017, -0.6749797463417053, -0.5433185696601868, -0.01894800178706646, 1.1306838989257812, 0.5379490852355957, 0.0028197227511554956, 0.22616079449653625, 0.03174731135368347, -0.11999968439340591, -0.012106993235647678, -0.843374490737915, -0.2796749174594879, 0.19515398144721985, -0.4187627136707306, -0.4788757860660553, 0.31856048107147217, -0.8717005848884583, 0.12026336789131165, -0.027117913588881493, 0.4464122951030731, -0.48500949144363403, -0.14051596820354462, 0.2310943305492401, -0.42833828926086426, 0.8493635058403015, -0.18444012105464935, -0.7208669781684875, 0.36270013451576233, 0.5219869613647461, 0.7597837448120117, -0.0908922404050827, -0.39838558435440063, -0.1222945973277092, -0.07624740898609161, -0.31457895040512085, 0.5208924412727356, -0.25112149119377136, -0.43042412400245667, -0.28835979104042053, 0.2691362202167511, -0.19752740859985352, -0.3193778395652771, 0.713620662689209, -0.2977102994918823, 0.23273055255413055, -0.4230128824710846, -0.6325832009315491, 0.11933557689189911, 0.36390194296836853, -0.4435656666755676, 1.267775058746338, 0.25161415338516235, -0.8380487561225891, 0.3892321288585663, -0.6102644205093384, -0.1823386400938034, -0.0006035074475221336, -0.06258350610733032, -0.8022997975349426, -0.250728040933609, 0.15900222957134247, 0.38811904191970825, -0.14262326061725616, -0.11150582879781723, -0.3779042661190033, -0.38333603739738464, 0.36861705780029297, -0.1867707371711731, 1.2640924453735352, -0.007151472382247448, -0.7530874609947205, 
-0.08759646117687225, -1.269348382949829, 0.3158440887928009, 0.23482568562030792, -0.38282838463783264, -0.1952505260705948, -0.45430535078048706, -0.044616036117076874, 0.15959273278713226, 0.3076055943965912, -0.8192169666290283, 0.24029137194156647, -0.3620457351207733, 0.1963755041360855, 1.2768367528915405, 0.039328888058662415, 0.13603179156780243, -0.5210050940513611, 0.5640353560447693, 0.19680261611938477, 0.21570248901844025, 0.437497615814209, -0.6105186343193054, -0.8219121098518372, -0.4724748432636261, -0.03752236068248749, 0.6011301875114441, -0.2732142508029938, 1.1203632354736328, 0.046232063323259354, -0.9027769565582275, -0.4709152281284332, -0.12827304005622864, 0.5325057506561279, 0.7768466472625732, 0.5621351003646851, -0.009389972314238548, -0.6209139227867126, -1.1464102268218994, -0.31959834694862366, -0.168489009141922, 0.15924791991710663, 0.16587448120117188, 1.021687388420105, -0.24701817333698273, 0.5998921394348145, -1.0428911447525024, -0.20279358327388763, 0.20250219106674194, -0.1382519006729126, 0.8121206760406494, 0.7481908202171326, 0.4598137438297272, -0.7133634686470032, -0.5554286241531372, 0.20876727998256683, -0.9202501177787781, -0.006578878033906221, 0.14851225912570953, -0.3742186725139618, 0.1748618334531784, 0.14927035570144653, -0.71296226978302, 0.5347111225128174, 0.24504674971103668, -1.1059762239456177, 1.0378202199935913, -0.29968029260635376, 0.5858751535415649, -1.0427839756011963, 0.1968172937631607, -0.05646205320954323, 0.019912803545594215, -0.5151769518852234, 0.09563140571117401, 0.10141320526599884, 0.4433937668800354, -0.5251720547676086, 0.7948489189147949, -0.6447635889053345, -0.07554274052381516, 0.457511305809021, 0.14338871836662292, -0.10950922220945358, 0.3872682452201843, -0.24341273307800293, 0.7872075438499451, 0.7185655832290649, -0.4644021987915039, 0.5202683806419373, 0.4129472076892853, -0.24204744398593903, 0.7276581525802612, -0.5103470683097839, -0.28103625774383545, 0.30989426374435425, -0.025375166907906532, -0.8347601890563965, -0.47046613693237305, 0.0889829769730568, -0.580280601978302, -0.13135161995887756, 0.3911116123199463, -0.3277800381183624, -0.7902307510375977, -0.9579481482505798, 0.3623694181442261, 0.7730215787887573, -0.4141225516796112, -0.12289305776357651, 0.058902353048324585, 0.14963355660438538, -0.7990992069244385, -0.8135927319526672, -0.544646680355072, -0.2370917648077011, -0.6845633387565613, 0.3051205575466156, -0.2641870677471161, -0.27328965067863464, -0.05520947650074959, -0.22560004889965057, -0.3133891522884369, 0.035915177315473557, 0.16953489184379578, 0.7007871270179749, -0.4382299482822418, -0.2906864285469055, -0.18883438408374786, -0.18450814485549927, 0.22923503816127777, -0.10220013558864594, 0.3808734118938446, -0.4870530664920807, -0.4303550720214844, -0.46046629548072815, -0.023139873519539833, 0.6618720889091492, -0.07011574506759644, 0.724234402179718, 0.44723501801490784, -0.3081020414829254, 0.03903722018003464, -0.27276843786239624, -0.2612104117870331, -0.5904803276062012, 0.30030831694602966, -0.517745852470398, -1.0917574167251587, 0.7498806118965149, 0.5577199459075928, 0.08659985661506653, 1.1712379455566406, 0.5762705206871033, -0.29497024416923523, 1.069153904914856, 0.05629609897732735, 0.32825276255607605, 0.3703495264053345, -0.6595520973205566, 0.14616240561008453, -0.9998528361320496, -0.2799358069896698, -0.5510349273681641, -0.4722541272640228, -0.7656814455986023, -0.0852452740073204, 0.32970160245895386, 0.20125916600227356, 
-0.7275909185409546, 0.5400901436805725, -0.840621829032898, 0.5884366631507874, 0.5814623236656189, 0.29358845949172974, 0.22665485739707947, -0.11416848748922348, -0.3441387414932251, -0.08316037058830261, -0.4744006395339966, -0.23177725076675415, 1.2641063928604126, 0.2718803584575653, 0.6741940975189209, 0.1283825784921646, 0.9079297780990601, 0.0973895937204361, -0.12308063358068466, -0.537162721157074, 0.683869481086731, 0.1188322901725769, -0.8085123300552368, -0.4473120868206024, -0.5360015630722046, -1.092110514640808, 0.4161735475063324, -0.12060169130563736, -0.8417847156524658, 0.0996813252568245, 0.04916675388813019, -0.230422705411911, 0.4837535619735718, -0.5016764998435974, 0.8068428635597229, -0.10630010813474655, -0.45641234517097473, 0.05720916762948036, -0.79327392578125, 0.5252494812011719, 0.1493736356496811, 0.2597526013851166, 0.04359438270330429, 0.2572111189365387, 1.2244248390197754, -0.7879753112792969, 0.41381406784057617, 0.10062047094106674, -0.03794166445732117, 0.32739177346229553, -0.16361483931541443, 0.51618891954422, 0.09988084435462952, -0.010593926534056664, -0.08079158514738083, 0.3011559844017029, -0.8913623094558716, -0.05525119975209236, 0.9315691590309143, -0.9776574969291687, -0.6300292015075684, -0.8771235942840576, -0.5028548240661621, 0.07829350978136063, 0.5806623697280884, 0.3787616491317749, 0.5098711848258972, 0.03676503524184227, 0.4283837378025055, 0.8840150833129883, -0.14241401851177216, 0.5671690106391907, 0.2902364432811737, 0.030933590605854988, -0.6915168762207031, 0.884842574596405, 0.08029045909643173, 0.36815616488456726, 0.2753509283065796, 0.41793203353881836, -0.5186934471130371, -0.180304616689682, -0.23441509902477264, 0.5155813097953796, -0.682621419429779, -0.2611887753009796, -0.3428032696247101, -0.38875335454940796, -0.7541917562484741, -0.6084263324737549, -0.31915152072906494, -0.5235936045646667, -0.46290457248687744, -0.4414980709552765, 0.6067284345626831, 0.45608025789260864, -0.33006927371025085, 0.01788223534822464, -0.5209656357765198, 0.3196994662284851, 0.3583935499191284, 0.5516646504402161, -0.34468111395835876, -0.5941740274429321, 0.03877631574869156, -0.11199599504470825, -0.5656861066818237, -0.9423528909683228, 0.2960422933101654, -0.04970012232661247, 0.4882524013519287, 0.5695807337760925, 0.0996265634894371, 0.8559777736663818, -0.22301128506660461, 1.088423252105713, 0.3285863399505615, -0.8274787068367004, 0.7606245875358582, -0.32094675302505493, 0.20997154712677002, 0.617604672908783, 0.18657472729682922, -0.19125793874263763, -0.6764089465141296, -1.3015246391296387, -0.7691338658332825, 0.6510739326477051, 0.37876370549201965, -0.24943599104881287, 0.0788799300789833, 0.13638590276241302, -0.30029645562171936, -0.17191888391971588, -0.6965078711509705, -0.887305736541748, -0.13446125388145447, -0.4667913019657135, 0.07948897033929825, 0.03130107745528221, -0.3951810896396637, -0.8139917850494385, 0.9907097220420837, 0.0031518873292952776, 0.5740702152252197, 0.49275869131088257, 0.0677575021982193, 0.0194906834512949, 0.48915067315101624, 0.9428096413612366, 0.7267614006996155, -0.4447312653064728, 0.4182718098163605, 0.4109025299549103, -1.0281779766082764, 0.46721112728118896, 0.29296445846557617, -0.11208225041627884, -0.029558220878243446, 0.4799635112285614, 0.45564526319503784, 0.026258330792188644, -0.26876285672187805, 0.5693774819374084, 0.008191827684640884, -0.5645785331726074, -0.41034191846847534, 0.09789246320724487, -0.12184323370456696, 0.05280916765332222, 
0.3839344084262848, -0.22126691043376923, -0.009635151363909245, -0.4920876622200012, 0.4013306498527527, 0.4136674404144287, -0.4685705006122589, -0.15649206936359406, 0.7111049294471741, -0.20485134422779083, -0.1713230460882187, 0.3506004512310028, -0.23183052241802216, -0.6440271139144897, 1.0815672874450684, 0.603831946849823, 0.6965401768684387, -0.23219138383865356, -0.06516461819410324, 0.8799031972885132, 0.3523997664451599, -0.016909057274460793, 0.5582641959190369, 0.30259254574775696, -0.22483545541763306, 0.1639222949743271, -0.8805468678474426, -0.03019557148218155, 0.14299418032169342, -0.8214506506919861, 0.3061598539352417, -0.5332443714141846, -0.18614311516284943, -0.008183011785149574, 0.4424999952316284, -0.457879900932312, 0.47577571868896484, -0.40723830461502075, 1.17816162109375, -0.9820976257324219, 0.7297850251197815, 0.7549430727958679, -0.5071507692337036, -1.0266140699386597, -0.5333004593849182, -0.006216472014784813, -0.7889270186424255, 0.6071724891662598, -0.09301184117794037, 0.1689886450767517, -0.0424618273973465, -0.7145995497703552, -0.9627188444137573, 1.4228143692016602, -0.09293738752603531, -0.4578339457511902, 0.2511195242404938, -0.03296327590942383, 0.4539303183555603, 0.2014075517654419, 0.5212212800979614, 0.7387067079544067, 0.7897037267684937, -0.14231941103935242, -0.7538909316062927, 0.37042656540870667, -0.5107590556144714, -0.2958783805370331, 0.5227721333503723, -0.9373939037322998, 1.237005591392517, 0.035712942481040955, 0.20397576689720154, -0.17998042702674866, 0.683444619178772, 0.8719818592071533, 0.3062446713447571, 0.378060519695282, 0.8957961797714233, 0.8416286706924438, -0.42035597562789917, 1.0008463859558105, -0.2049647867679596, 0.8585436940193176, 0.7034878134727478, 0.17629046738147736, 0.8466001152992249, 0.6896418929100037, -0.6024951338768005, 0.565414547920227, 0.7781742811203003, -0.29438382387161255, 0.3896350562572479, 0.26473382115364075, -0.10242843627929688, -0.14213643968105316, 0.41711556911468506, -0.8940730094909668, 0.1110006794333458, 0.04408499598503113, -0.30012059211730957, 0.11593198776245117, -0.40322473645210266, 0.28446662425994873, -0.11147511750459671, -0.030858715996146202, 0.36825668811798096, 0.06768178939819336, -0.42548200488090515, 0.9743305444717407, -0.15921412408351898, 0.7586493492126465, -0.5062359571456909, -0.10819996148347855, -0.36238226294517517, 0.5659767389297485, -0.4409681558609009, -1.0260387659072876, 0.10245528817176819, 0.06727525591850281, -0.1320190578699112, -0.10922646522521973, 0.7032150030136108, -0.18400758504867554, -0.7999186515808105, 0.09377851337194443, 0.05031779780983925, 0.11582492291927338, 0.505308210849762, -0.6631930470466614, -0.3108585774898529, -0.022233465686440468, -0.56069016456604, 0.15281938016414642, 0.27365371584892273, 0.24604326486587524, 0.5322978496551514, 0.6681343913078308, 0.14093749225139618, 0.4221738278865814, -0.5745366215705872, 0.7620870471000671, -1.0694421529769897, -0.7197701334953308, -0.9454504251480103, 0.4503142833709717, -0.3330695927143097, -0.8684815764427185, 1.04146409034729, 1.0336391925811768, 0.8709383010864258, 0.01810493879020214, 0.611721932888031, -0.3481674790382385, 0.25662150979042053, -0.44319823384284973, 0.9416370987892151, -0.8743178248405457, -0.19814880192279816, -0.2544628977775574, -0.7400527596473694, -0.32909828424453735, 0.8311421275138855, -0.13598260283470154, 0.014713769778609276, 1.0503097772598267, 0.6400040984153748, -0.08728340268135071, 0.05134127661585808, -0.06534967571496964, 
0.5788837671279907, 0.40746110677719116, 1.0184663534164429, 0.6631826758384705, -0.8492095470428467, 0.29910552501678467, -0.5539318919181824, -0.3770129680633545, -0.41120433807373047, -0.4637756943702698, -0.8582637906074524, -0.49337202310562134, -0.2786674499511719, -0.6582978367805481, -0.14059844613075256, 1.00146484375, 0.43784940242767334, -0.9058643579483032, -0.3967186212539673, -0.11490629613399506, 0.17638516426086426, -0.5618082284927368, -0.4122455418109894, 0.7637148499488831, -0.07656364887952805, -0.5180264711380005, 0.2090606391429901, -0.17104080319404602, 0.21379925310611725, 0.044911473989486694, -0.4361492991447449, -0.6916164755821228, 0.017303088679909706, 0.4815619885921478, 0.3324679136276245, -0.7422139644622803, -0.6688682436943054, 0.28068703413009644, -0.5152598023414612, 0.3809483051300049, -0.05706010386347771, -0.5656448006629944, 0.02574150264263153, 0.6938607692718506, 0.45416414737701416, 0.6108558177947998, -0.07731416821479797, 0.04475453123450279, -0.6372069716453552, 0.16432413458824158, -0.017821846529841423, 0.24083492159843445, -0.08368278294801712, -0.3269774913787842, 0.7644135355949402, 0.6274029016494751, -0.5308171510696411, -1.1494773626327515, -0.4268485903739929, -1.4295763969421387, -0.07284516096115112, 1.1426126956939697, 0.05130089446902275, -0.5074172616004944, 0.2711487114429474, -0.11941943317651749, 0.2192380130290985, -0.31573593616485596, 0.7412657141685486, 0.753185510635376, -0.3477071225643158, 0.13064922392368317, -0.6778830289840698, 0.3697510063648224, 0.5684831738471985, -1.1727317571640015, -0.10936394333839417, 0.2212630659341812, 0.2888184189796448, 0.38568583130836487, 0.6438577175140381, -0.08140841126441956, 0.30425187945365906, 0.21303097903728485, 0.010871426202356815, 0.01538340374827385, 0.09782913327217102, -0.29185009002685547, 0.06335874646902084, -0.22589948773384094, -0.44718897342681885 ]
open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-2.1
open-llm-leaderboard
2023-10-22T11:51:11Z
201
0
[ "region:us" ]
null
2023-08-30T15:35:00Z
--- pretty_name: Evaluation run of jondurbin/airoboros-l2-13b-2.1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [jondurbin/airoboros-l2-13b-2.1](https://huggingface.co/jondurbin/airoboros-l2-13b-2.1)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-2.1\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T11:50:58.922768](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-2.1/blob/main/results_2023-10-22T11-50-58.922768.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.34458892617449666,\n\ \ \"em_stderr\": 0.004866841438021559,\n \"f1\": 0.4243645134228212,\n\ \ \"f1_stderr\": 0.004703880113863456,\n \"acc\": 0.39311250241596774,\n\ \ \"acc_stderr\": 0.00863314852533744\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.34458892617449666,\n \"em_stderr\": 0.004866841438021559,\n\ \ \"f1\": 0.4243645134228212,\n \"f1_stderr\": 0.004703880113863456\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0356330553449583,\n \ \ \"acc_stderr\": 0.005106107853744191\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7505919494869772,\n \"acc_stderr\": 0.012160189196930689\n\ \ }\n}\n```" repo_url: https://huggingface.co/jondurbin/airoboros-l2-13b-2.1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|arc:challenge|25_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|arc:challenge|25_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-30T18:13:29.562428.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T07_13_42.257437 path: - '**/details_harness|drop|3_2023-10-22T07-13-42.257437.parquet' - split: 2023_10_22T11_50_58.922768 path: - '**/details_harness|drop|3_2023-10-22T11-50-58.922768.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T11-50-58.922768.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T07_13_42.257437 path: - '**/details_harness|gsm8k|5_2023-10-22T07-13-42.257437.parquet' - split: 2023_10_22T11_50_58.922768 path: - '**/details_harness|gsm8k|5_2023-10-22T11-50-58.922768.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T11-50-58.922768.parquet' - config_name: 
harness_hellaswag_10 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hellaswag|10_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hellaswag|10_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T15:34:36.295006.parquet' 
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T15:34:36.295006.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:13:29.562428.parquet' 
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:13:29.562428.parquet' - 
'**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:13:29.562428.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:13:29.562428.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:13:29.562428.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:13:29.562428.parquet' - config_name: 
harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:13:29.562428.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_30T15_34_36.295006 path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T15:34:36.295006.parquet' - split: 2023_08_30T18_13_29.562428 path: - 
'**/details_harness|truthfulqa:mc|0_2023-08-30T18:13:29.562428.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T18:13:29.562428.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T07_13_42.257437 path: - '**/details_harness|winogrande|5_2023-10-22T07-13-42.257437.parquet' - split: 2023_10_22T11_50_58.922768 path: - '**/details_harness|winogrande|5_2023-10-22T11-50-58.922768.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T11-50-58.922768.parquet' - config_name: results data_files: - split: 2023_08_30T15_34_36.295006 path: - results_2023-08-30T15:34:36.295006.parquet - split: 2023_08_30T18_13_29.562428 path: - results_2023-08-30T18:13:29.562428.parquet - split: 2023_10_22T07_13_42.257437 path: - results_2023-10-22T07-13-42.257437.parquet - split: 2023_10_22T11_50_58.922768 path: - results_2023-10-22T11-50-58.922768.parquet - split: latest path: - results_2023-10-22T11-50-58.922768.parquet
---

# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-2.1

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/jondurbin/airoboros-l2-13b-2.1
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-13b-2.1](https://huggingface.co/jondurbin/airoboros-l2-13b-2.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-2.1",
	"harness_winogrande_5",
	split="train")
```
A further loading sketch is appended at the end of this card.

## Latest results

These are the [latest results from run 2023-10-22T11:50:58.922768](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-2.1/blob/main/results_2023-10-22T11-50-58.922768.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.34458892617449666,
        "em_stderr": 0.004866841438021559,
        "f1": 0.4243645134228212,
        "f1_stderr": 0.004703880113863456,
        "acc": 0.39311250241596774,
        "acc_stderr": 0.00863314852533744
    },
    "harness|drop|3": {
        "em": 0.34458892617449666,
        "em_stderr": 0.004866841438021559,
        "f1": 0.4243645134228212,
        "f1_stderr": 0.004703880113863456
    },
    "harness|gsm8k|5": {
        "acc": 0.0356330553449583,
        "acc_stderr": 0.005106107853744191
    },
    "harness|winogrande|5": {
        "acc": 0.7505919494869772,
        "acc_stderr": 0.012160189196930689
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
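As a complement to the loading example above, the snippet below is a minimal usage sketch rather than part of the auto-generated card: it assumes the standard `datasets` API (including `get_dataset_config_names`, which the card itself does not mention), the variable name `REPO` is illustrative, and the "results" configuration with its "latest" and timestamped splits is taken directly from the configuration listing above.

```python
from datasets import get_dataset_config_names, load_dataset

# Repository id taken from the card above; the variable name is illustrative.
REPO = "open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-2.1"

# Enumerate the available configurations (one per evaluated task, plus the
# aggregated "results" configuration).
configs = get_dataset_config_names(REPO)
print(configs)

# The "results" configuration stores the aggregated metrics; its "latest" split
# points at the most recent run (2023-10-22T11:50:58.922768 at the time of writing).
results = load_dataset(REPO, "results", split="latest")
print(results[0])
```

A specific run can be retrieved by passing its timestamped split name (for example `split="2023_10_22T11_50_58.922768"`) instead of `"latest"`.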
[ -0.4637080729007721, -0.6417862176895142, 0.0830620601773262, 0.22033171355724335, -0.15387380123138428, 0.07061693072319031, -0.36693188548088074, -0.2150876224040985, 0.4155997335910797, 0.5582953691482544, -0.685172975063324, -0.8785203695297241, -0.7137202620506287, 0.18499980866909027, -0.17584574222564697, 1.1589787006378174, -0.28880104422569275, -0.2824735939502716, 0.023550691083073616, -0.2900751233100891, -0.30567190051078796, -0.38297224044799805, -0.47509124875068665, -0.4330955445766449, 0.3915991187095642, 0.5534810423851013, 0.3820682168006897, 0.7709704637527466, 0.6561283469200134, 0.3617519438266754, -0.20857582986354828, 0.21478554606437683, -0.4242105782032013, -0.11324269324541092, 0.27000999450683594, -0.5943891406059265, -0.7302800416946411, 0.12123626470565796, 0.6691821813583374, 0.5219554305076599, -0.29840534925460815, 0.6089426279067993, 0.1005561351776123, 0.6560408473014832, -0.4663287401199341, 0.3437824845314026, -0.3639277219772339, -0.05257128179073334, -0.3683820068836212, -0.17552432417869568, -0.024581050500273705, -0.3126382827758789, -0.172068789601326, -0.5460110306739807, 0.15542277693748474, 0.1802666187286377, 1.088523507118225, 0.15270352363586426, -0.1661653071641922, -0.25446003675460815, -0.21831734478473663, 0.8053188323974609, -0.9126355648040771, 0.06011495739221573, 0.6779192090034485, 0.10150517523288727, -0.23295964300632477, -0.5072084665298462, -0.32256272435188293, -0.0674310028553009, -0.283943772315979, 0.1884351223707199, -0.07365713268518448, -0.11507271975278854, 0.43226855993270874, 0.5470596551895142, -0.7258049249649048, 0.08833178132772446, -0.6313034296035767, -0.1027442067861557, 0.9450620412826538, 0.3211791217327118, 0.016526537016034126, -0.5757222175598145, -0.3651233911514282, -0.35373032093048096, -0.43934202194213867, 0.22957266867160797, 0.559775173664093, 0.45961010456085205, -0.663269579410553, 0.7412778735160828, -0.4165659248828888, 0.6493832468986511, -0.09741593897342682, -0.2920680344104767, 0.8457136154174805, -0.5300115346908569, -0.2596992254257202, 0.03874996677041054, 1.0045961141586304, 0.4248735010623932, 0.038060110062360764, 0.2574470043182373, -0.26393017172813416, -0.15667973458766937, 0.08695897459983826, -0.786922037601471, -0.15100142359733582, 0.3826003670692444, -0.6369537115097046, -0.4611999988555908, 0.29589974880218506, -0.9212486743927002, -0.1685498207807541, -0.2620040774345398, 0.1544688493013382, -0.16005642712116241, -0.4728429913520813, -0.12023989111185074, -0.18393278121948242, 0.20667780935764313, 0.13809099793434143, -0.6219586133956909, 0.3945732116699219, 0.5924877524375916, 0.9723285436630249, -0.12021524459123611, -0.4047391414642334, -0.3033255636692047, -0.2192954570055008, -0.25018179416656494, 0.4945538640022278, -0.1635485589504242, -0.4168004095554352, -0.17212404310703278, 0.24295219779014587, -0.30314838886260986, -0.6824206709861755, 0.5896546840667725, -0.22754840552806854, 0.14637817442417145, -0.20237313210964203, -0.4280690848827362, -0.11605319380760193, 0.3949582576751709, -0.6582309603691101, 1.4146853685379028, 0.3994642198085785, -0.8440611362457275, 0.04311256855726242, -0.7759971022605896, -0.21965132653713226, 0.007955246604979038, 0.01771511882543564, -0.5601259469985962, -0.13666196167469025, 0.13990357518196106, 0.5744119882583618, -0.38019514083862305, 0.10774708539247513, -0.2992497980594635, -0.38678833842277527, 0.1981595754623413, -0.1022748276591301, 1.0906330347061157, 0.2362508475780487, -0.42250245809555054, 0.05610010400414467, 
-0.9810522198677063, 0.13172149658203125, 0.2755677402019501, -0.5657557249069214, -0.19207461178302765, -0.2876585125923157, 0.12116638571023941, 0.09714516997337341, 0.4797884523868561, -0.4702787697315216, 0.36882084608078003, -0.24608241021633148, 0.21412861347198486, 0.982771635055542, 0.00950946006923914, 0.157703697681427, -0.4726136326789856, 0.6210607886314392, 0.02720530331134796, 0.2828659415245056, 0.09868445992469788, -0.5202546119689941, -0.7416812777519226, -0.22645311057567596, 0.1476009488105774, 0.6857396960258484, -0.5145984888076782, 0.7669451236724854, -0.36974504590034485, -0.756409227848053, -0.7031576037406921, 0.1966009885072708, 0.5297383069992065, 0.49673473834991455, 0.3602806031703949, -0.19197238981723785, -0.7170098423957825, -1.0022320747375488, -0.012290343642234802, -0.35004204511642456, 0.09297136962413788, 0.5281705260276794, 0.9913997650146484, -0.3244183361530304, 0.5790166258811951, -0.7988777756690979, -0.3036375641822815, -0.312394917011261, 0.08338288962841034, 0.8010594844818115, 0.5165868997573853, 0.5032482743263245, -0.6540936827659607, -0.33115294575691223, 0.0034468036610633135, -0.7880551218986511, -0.34462764859199524, -0.1312883049249649, -0.2487121969461441, 0.3837193548679352, 0.009729078970849514, -0.4820473790168762, 0.5334328413009644, 0.6052559018135071, -0.5838717818260193, 0.6477488279342651, -0.01629694178700447, 0.3884474039077759, -1.1271306276321411, 0.235176220536232, 0.074630007147789, 0.07689676433801651, -0.412674218416214, -0.13588257133960724, -0.007145912852138281, 0.3687593638896942, -0.41579410433769226, 0.7059332728385925, -0.4080432653427124, -0.18066254258155823, 0.025476926937699318, 0.10951851308345795, -0.053066007792949677, 0.5480249524116516, -0.32470041513442993, 0.697762131690979, 0.5338035821914673, -0.36496055126190186, 0.46987974643707275, 0.45999398827552795, -0.5159812569618225, 0.3330160081386566, -0.5276233553886414, -0.0789669081568718, 0.13071022927761078, 0.18542996048927307, -0.8924301266670227, -0.3952536880970001, 0.4304805099964142, -0.6113699078559875, 0.23973967134952545, -0.3272983729839325, -0.5763258337974548, -0.49575987458229065, -0.49379274249076843, 0.1715448647737503, 0.5039392709732056, -0.4868760108947754, 0.2543456256389618, 0.40748700499534607, -0.005898142699152231, -0.6403495669364929, -0.7479856610298157, -0.1767345815896988, -0.32905998826026917, -0.5954791307449341, 0.2913927435874939, -0.18027514219284058, -0.3099726736545563, 0.053929347544908524, -0.14641793072223663, -0.1003234013915062, 0.21915499866008759, 0.40492239594459534, 0.5870757102966309, -0.03586995601654053, -0.3746596574783325, -0.16521310806274414, -0.12478179484605789, 0.14153002202510834, 0.14040213823318481, 0.547111451625824, -0.260314404964447, -0.219968780875206, -0.2537151575088501, 0.10969226062297821, 0.48253899812698364, -0.11403122544288635, 0.8368760943412781, 0.6677509546279907, -0.2994767725467682, -0.03136489912867546, -0.4570457339286804, 0.08735622465610504, -0.48962390422821045, 0.3058813810348511, -0.3057926595211029, -0.7453476786613464, 0.8196350336074829, 0.2280883491039276, 0.1098986268043518, 0.7276061177253723, 0.5649316906929016, 0.0140581876039505, 0.7571737170219421, 0.10776730626821518, -0.10349103808403015, 0.4962208569049835, -0.8561140894889832, -0.10532818734645844, -1.1869815587997437, -0.5322034955024719, -0.5182625651359558, -0.3876250088214874, -0.8265668749809265, -0.31023767590522766, 0.2827928960323334, 0.29988545179367065, -0.3890354633331299, 0.5460337400436401, 
-0.6551262140274048, 0.20949727296829224, 0.6330053210258484, 0.16014669835567474, -0.06679215282201767, -0.0649731457233429, -0.08308374881744385, 0.23029860854148865, -0.5246858596801758, -0.3388475179672241, 1.3243169784545898, 0.24201801419258118, 0.695119321346283, 0.038867440074682236, 0.9860180616378784, 0.25993484258651733, 0.33502867817878723, -0.4688805639743805, 0.6131797432899475, -0.04536643996834755, -0.5977253317832947, -0.13478444516658783, -0.6714204549789429, -0.9691199064254761, 0.22817258536815643, 0.000168180835316889, -0.913114607334137, 0.04505521059036255, -0.006785740610212088, -0.038028404116630554, 0.30590951442718506, -0.5429469347000122, 0.7999935150146484, -0.3136705458164215, -0.33989691734313965, 0.06818269938230515, -0.858771026134491, 0.38149294257164, 0.0785747841000557, 0.41074293851852417, -0.295549601316452, 0.007105723023414612, 1.220974326133728, -0.5571568012237549, 0.7077347040176392, -0.2850123643875122, 0.11373363435268402, 0.40955689549446106, -0.3074166774749756, 0.5401127338409424, -0.03182503953576088, -0.28145188093185425, 0.4855465292930603, -0.11646092683076859, -0.24825286865234375, -0.33954328298568726, 0.960724413394928, -0.9375841021537781, -0.3141700327396393, -0.4977402687072754, -0.5054232478141785, 0.2871120572090149, 0.2968299984931946, 0.3068479299545288, 0.39804017543792725, 0.13669462502002716, 0.3042435050010681, 0.26347437500953674, -0.20151960849761963, 0.48445969820022583, 0.44865095615386963, -0.2383977323770523, -0.8514577746391296, 0.685655415058136, 0.32791104912757874, 0.10213593393564224, 0.1995447278022766, 0.07093264162540436, -0.511394202709198, -0.4549616277217865, -0.4362950026988983, 0.3063129484653473, -0.6059375405311584, -0.4299648106098175, -0.42799556255340576, -0.24235832691192627, -0.46269819140434265, -0.0926293358206749, -0.36876025795936584, -0.4453932046890259, -0.49912041425704956, -0.3021138906478882, 0.6112816333770752, 0.6012900471687317, -0.41977304220199585, 0.2968246638774872, -0.7239343523979187, 0.30699941515922546, -0.19813500344753265, 0.4415811598300934, -0.11369168758392334, -0.5788763165473938, -0.4151933491230011, 0.15093497931957245, -0.35183462500572205, -0.8645129799842834, 0.6037917137145996, -0.01989900693297386, 0.7551838755607605, 0.1376562863588333, 0.09208115935325623, 0.875021755695343, -0.18958771228790283, 1.0334440469741821, 0.020923975855112076, -0.7335367798805237, 0.8371745944023132, -0.2753138840198517, 0.10090161114931107, 0.5579996705055237, 0.19788581132888794, -0.4960484206676483, -0.25777965784072876, -0.992620587348938, -1.1653934717178345, 1.093217372894287, 0.5575612783432007, -0.370728999376297, 0.07743529975414276, 0.36325502395629883, -0.06959359347820282, 0.23340047895908356, -0.6068381667137146, -0.7951357960700989, -0.16767804324626923, -0.3018040359020233, -0.057603619992733, -0.017048247158527374, -0.4183085560798645, -0.46260198950767517, 0.9384055137634277, -0.007076925132423639, 0.4843124449253082, 0.18150578439235687, 0.002472511027008295, -0.1265442818403244, 0.2092280238866806, 0.5027585625648499, 0.7038219571113586, -0.4507647752761841, -0.02969839610159397, 0.25618603825569153, -0.503616452217102, 0.08679652959108353, 0.38527536392211914, 0.006136276759207249, -0.07728604227304459, 0.7288805842399597, 1.0355515480041504, 0.06490356475114822, -0.4495692551136017, 0.5104586482048035, 0.11767444014549255, -0.3107234835624695, -0.4791896939277649, 0.220436230301857, -0.08667197823524475, 0.3447253406047821, 0.42665138840675354, -0.031848203390836716, 
-0.005323932506144047, -0.2739221453666687, 0.27577468752861023, 0.23709557950496674, -0.05685734376311302, -0.25253379344940186, 0.5752671957015991, -0.039171263575553894, -0.34775644540786743, 0.7443279027938843, -0.1331627070903778, -0.6018208861351013, 1.1588650941848755, 0.3789467513561249, 0.8323147296905518, -0.11569308489561081, 0.1019778847694397, 0.6145191788673401, 0.3726198673248291, -0.16955162584781647, 0.6129159331321716, 0.12807750701904297, -0.5817479491233826, -0.18391531705856323, -0.8117550611495972, -0.1967562735080719, 0.3179904818534851, -1.1482410430908203, 0.3298044502735138, -0.15503546595573425, -0.1815844625234604, -0.14483731985092163, 0.4605507254600525, -0.8454030156135559, 0.1886509358882904, 0.040128011256456375, 0.9480591416358948, -1.0361449718475342, 0.6330431699752808, 0.8316223621368408, -0.4000721275806427, -0.9131686687469482, -0.30617040395736694, 0.11344451457262039, -0.879332423210144, 0.5351102948188782, 0.3347018361091614, 0.39307525753974915, -0.12330830842256546, -0.6260432004928589, -1.0468579530715942, 1.5574729442596436, 0.10285129398107529, -0.50729900598526, 0.20062294602394104, 0.08061221987009048, 0.3868280053138733, -0.27218928933143616, 0.5792825222015381, 0.709158718585968, 0.7632518410682678, -0.07193892449140549, -0.9437853097915649, 0.25635725259780884, -0.5527889728546143, -0.009556938894093037, 0.373670369386673, -0.8913457989692688, 0.9788815379142761, -0.13919657468795776, 0.05451203137636185, -0.025167454034090042, 0.34489884972572327, 0.6408963203430176, 0.29955002665519714, 0.4703137278556824, 0.7898678183555603, 0.6842604875564575, -0.3620457649230957, 1.0718680620193481, -0.2915775775909424, 0.8738458156585693, 1.0070971250534058, 0.011167933233082294, 0.7131038904190063, 0.36091041564941406, -0.4714547097682953, 0.5350230932235718, 0.7895025610923767, -0.41383227705955505, 0.4896986484527588, 0.19546180963516235, 0.03841211646795273, 0.017505737021565437, -0.009525161236524582, -0.49731025099754333, 0.43977001309394836, 0.23720571398735046, -0.5091133713722229, -0.12016292661428452, -0.2457294762134552, 0.17012083530426025, -0.34862327575683594, -0.21568046510219574, 0.6290204524993896, -0.06406937539577484, -0.6106363534927368, 0.8185150623321533, -0.04733992740511894, 0.6659416556358337, -0.7011814117431641, -0.15375089645385742, -0.2381904125213623, 0.2196405827999115, -0.5282592177391052, -1.0186470746994019, 0.22453489899635315, 0.14112742245197296, -0.18216150999069214, -0.26269322633743286, 0.5799665451049805, -0.32375362515449524, -0.6181918382644653, 0.4377565383911133, 0.3501301407814026, 0.3375113606452942, 0.1969640552997589, -0.9257999062538147, 0.36031070351600647, 0.30578047037124634, -0.8315505385398865, 0.3508119583129883, 0.2302415370941162, 0.17262136936187744, 0.5661067962646484, 0.7573485374450684, 0.21963003277778625, 0.10705776512622833, -0.08782785385847092, 1.0577436685562134, -0.7518439888954163, -0.33543211221694946, -0.8354766964912415, 0.8499497175216675, -0.2708684504032135, -0.7080144882202148, 0.7916504740715027, 0.9584479331970215, 0.8756336569786072, 0.16876660287380219, 0.8285168409347534, -0.55287104845047, 0.4808753728866577, -0.45195311307907104, 0.7755309343338013, -0.7374506592750549, 0.359544575214386, -0.15465432405471802, -0.7702975869178772, -0.08811718225479126, 0.7639272212982178, -0.09470777213573456, -0.02681775763630867, 0.5568569302558899, 0.9491660594940186, 0.12707678973674774, 0.044369351118803024, -0.10743352025747299, 0.4472518861293793, 0.36745962500572205, 
0.5697898864746094, 0.5872514843940735, -0.6869664192199707, 0.4142676591873169, -0.6925799250602722, -0.45612284541130066, -0.23254938423633575, -0.6769675016403198, -0.833429217338562, -0.5265945196151733, -0.3326524794101715, -0.5577207803726196, -0.01340803224593401, 1.0954777002334595, 0.45744630694389343, -0.808767557144165, -0.4734736979007721, 0.009391914121806622, 0.17240437865257263, -0.26025259494781494, -0.3553366959095001, 0.5587636828422546, -0.011778750456869602, -0.6858982443809509, 0.3269883692264557, -0.14040198922157288, -0.10270660370588303, -0.02242966741323471, -0.28513020277023315, -0.39353564381599426, -0.30844300985336304, 0.3382055163383484, 0.10895884037017822, -0.749602198600769, -0.2916833460330963, -0.09410761296749115, -0.024945665150880814, 0.24556264281272888, 0.27868059277534485, -0.6009097695350647, 0.10249412804841995, 0.5871278643608093, 0.23898640275001526, 0.6628528833389282, 0.0629916563630104, 0.10544019937515259, -0.816932737827301, 0.007449669297784567, 0.0052814665250480175, 0.5094455480575562, 0.2245142012834549, -0.43922358751296997, 1.039621114730835, 0.36323511600494385, -0.7026647329330444, -1.003456473350525, -0.24283301830291748, -1.1861709356307983, -0.07133730500936508, 1.4717055559158325, -0.2756500244140625, -0.2986910343170166, 0.17774231731891632, -0.17953072488307953, 0.363014817237854, -0.6551770567893982, 0.4823151230812073, 0.7222169041633606, -0.4059428870677948, -0.026227133348584175, -0.6301960945129395, 0.2521723806858063, 0.007437825668603182, -1.0327837467193604, 0.012452535331249237, 0.32497239112854004, 0.39664602279663086, 0.25209563970565796, 0.6407817602157593, 0.04102913290262222, -0.1757730096578598, -0.012181243859231472, 0.174355611205101, -0.3057640790939331, -0.1938893347978592, -0.21842779219150543, 0.06789451837539673, -0.38588854670524597, -0.438132107257843 ]
open-llm-leaderboard/details_tianyil1__denas-llama2
open-llm-leaderboard
2023-08-30T16:05:15Z
201
0
[ "region:us" ]
null
2023-08-30T16:04:05Z
--- pretty_name: Evaluation run of tianyil1/denas-llama2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [tianyil1/denas-llama2](https://huggingface.co/tianyil1/denas-llama2) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_tianyil1__denas-llama2\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-30T16:03:41.588049](https://huggingface.co/datasets/open-llm-leaderboard/details_tianyil1__denas-llama2/blob/main/results_2023-08-30T16%3A03%3A41.588049.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.45795528138208735,\n\ \ \"acc_stderr\": 0.03527742372208128,\n \"acc_norm\": 0.4618604191106212,\n\ \ \"acc_norm_stderr\": 0.03526366614184863,\n \"mc1\": 0.3084455324357405,\n\ \ \"mc1_stderr\": 0.01616803938315687,\n \"mc2\": 0.4524124423828304,\n\ \ \"mc2_stderr\": 0.01467368036859452\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.4991467576791809,\n \"acc_stderr\": 0.014611369529813272,\n\ \ \"acc_norm\": 0.5392491467576792,\n \"acc_norm_stderr\": 0.014566303676636584\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5880302728540131,\n\ \ \"acc_stderr\": 0.004911837730582202,\n \"acc_norm\": 0.7783310097590121,\n\ \ \"acc_norm_stderr\": 0.004145206350032315\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n\ \ \"acc_stderr\": 0.04284958639753399,\n \"acc_norm\": 0.43703703703703706,\n\ \ \"acc_norm_stderr\": 0.04284958639753399\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.4144736842105263,\n \"acc_stderr\": 0.04008973785779206,\n\ \ \"acc_norm\": 0.4144736842105263,\n \"acc_norm_stderr\": 0.04008973785779206\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.46,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.46037735849056605,\n \"acc_stderr\": 0.030676096599389184,\n\ \ \"acc_norm\": 0.46037735849056605,\n \"acc_norm_stderr\": 0.030676096599389184\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4236111111111111,\n\ \ \"acc_stderr\": 0.0413212501972337,\n \"acc_norm\": 0.4236111111111111,\n\ \ \"acc_norm_stderr\": 0.0413212501972337\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n\ \ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.41040462427745666,\n\ \ \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.41040462427745666,\n\ \ \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179963,\n\ \ \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179963\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n\ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4340425531914894,\n \"acc_stderr\": 0.03240038086792747,\n\ \ \"acc_norm\": 0.4340425531914894,\n \"acc_norm_stderr\": 0.03240038086792747\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n\ \ \"acc_stderr\": 0.043727482902780064,\n \"acc_norm\": 0.3157894736842105,\n\ \ \"acc_norm_stderr\": 0.043727482902780064\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.42758620689655175,\n \"acc_stderr\": 0.041227371113703316,\n\ \ \"acc_norm\": 0.42758620689655175,\n \"acc_norm_stderr\": 0.041227371113703316\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.291005291005291,\n \"acc_stderr\": 0.023393826500484865,\n \"\ acc_norm\": 0.291005291005291,\n \"acc_norm_stderr\": 0.023393826500484865\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2698412698412698,\n\ \ \"acc_stderr\": 0.03970158273235172,\n \"acc_norm\": 0.2698412698412698,\n\ \ \"acc_norm_stderr\": 0.03970158273235172\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4612903225806452,\n\ \ \"acc_stderr\": 0.02835863485983692,\n \"acc_norm\": 0.4612903225806452,\n\ \ \"acc_norm_stderr\": 0.02835863485983692\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.3399014778325123,\n \"acc_stderr\": 0.033327690684107895,\n\ \ \"acc_norm\": 0.3399014778325123,\n \"acc_norm_stderr\": 0.033327690684107895\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\"\ : 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6,\n \"acc_stderr\": 0.03825460278380025,\n \ \ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.03825460278380025\n },\n\ \ \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5505050505050505,\n\ \ \"acc_stderr\": 0.035441324919479704,\n \"acc_norm\": 0.5505050505050505,\n\ \ \"acc_norm_stderr\": 0.035441324919479704\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\"\ : {\n \"acc\": 
0.6787564766839378,\n \"acc_stderr\": 0.033699508685490674,\n\ \ \"acc_norm\": 0.6787564766839378,\n \"acc_norm_stderr\": 0.033699508685490674\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4230769230769231,\n \"acc_stderr\": 0.025049197876042338,\n\ \ \"acc_norm\": 0.4230769230769231,\n \"acc_norm_stderr\": 0.025049197876042338\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \ \ \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.37815126050420167,\n \"acc_stderr\": 0.03149930577784906,\n\ \ \"acc_norm\": 0.37815126050420167,\n \"acc_norm_stderr\": 0.03149930577784906\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.03757949922943342,\n \"acc_norm\"\ : 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943342\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6220183486238532,\n\ \ \"acc_stderr\": 0.02078918706672811,\n \"acc_norm\": 0.6220183486238532,\n\ \ \"acc_norm_stderr\": 0.02078918706672811\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.03054674526495318,\n\ \ \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.03054674526495318\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.5392156862745098,\n \"acc_stderr\": 0.03498501649369527,\n \"\ acc_norm\": 0.5392156862745098,\n \"acc_norm_stderr\": 0.03498501649369527\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.6033755274261603,\n \"acc_stderr\": 0.03184399873811225,\n \ \ \"acc_norm\": 0.6033755274261603,\n \"acc_norm_stderr\": 0.03184399873811225\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5426008968609866,\n\ \ \"acc_stderr\": 0.03343577705583065,\n \"acc_norm\": 0.5426008968609866,\n\ \ \"acc_norm_stderr\": 0.03343577705583065\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5190839694656488,\n \"acc_stderr\": 0.04382094705550988,\n\ \ \"acc_norm\": 0.5190839694656488,\n \"acc_norm_stderr\": 0.04382094705550988\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6363636363636364,\n \"acc_stderr\": 0.043913262867240704,\n \"\ acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.043913262867240704\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.48148148148148145,\n\ \ \"acc_stderr\": 0.04830366024635331,\n \"acc_norm\": 0.48148148148148145,\n\ \ \"acc_norm_stderr\": 0.04830366024635331\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.4662576687116564,\n \"acc_stderr\": 0.039194155450484096,\n\ \ \"acc_norm\": 0.4662576687116564,\n \"acc_norm_stderr\": 0.039194155450484096\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.39285714285714285,\n\ \ \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.39285714285714285,\n\ \ \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.5242718446601942,\n \"acc_stderr\": 0.049449010929737795,\n\ \ \"acc_norm\": 0.5242718446601942,\n \"acc_norm_stderr\": 0.049449010929737795\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7094017094017094,\n\ \ \"acc_stderr\": 0.029745048572674085,\n \"acc_norm\": 0.7094017094017094,\n\ \ 
\"acc_norm_stderr\": 0.029745048572674085\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.52,\n \"acc_stderr\": 0.05021167315686779,\n \ \ \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05021167315686779\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6168582375478927,\n\ \ \"acc_stderr\": 0.017384774194885627,\n \"acc_norm\": 0.6168582375478927,\n\ \ \"acc_norm_stderr\": 0.017384774194885627\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.48554913294797686,\n \"acc_stderr\": 0.02690784985628254,\n\ \ \"acc_norm\": 0.48554913294797686,\n \"acc_norm_stderr\": 0.02690784985628254\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\ \ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\ \ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.4542483660130719,\n \"acc_stderr\": 0.02850980780262656,\n\ \ \"acc_norm\": 0.4542483660130719,\n \"acc_norm_stderr\": 0.02850980780262656\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6045016077170418,\n\ \ \"acc_stderr\": 0.027770918531427838,\n \"acc_norm\": 0.6045016077170418,\n\ \ \"acc_norm_stderr\": 0.027770918531427838\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5154320987654321,\n \"acc_stderr\": 0.0278074900442762,\n\ \ \"acc_norm\": 0.5154320987654321,\n \"acc_norm_stderr\": 0.0278074900442762\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.34397163120567376,\n \"acc_stderr\": 0.028338017428611327,\n \ \ \"acc_norm\": 0.34397163120567376,\n \"acc_norm_stderr\": 0.028338017428611327\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3741851368970013,\n\ \ \"acc_stderr\": 0.012359335618172056,\n \"acc_norm\": 0.3741851368970013,\n\ \ \"acc_norm_stderr\": 0.012359335618172056\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5183823529411765,\n \"acc_stderr\": 0.030352303395351964,\n\ \ \"acc_norm\": 0.5183823529411765,\n \"acc_norm_stderr\": 0.030352303395351964\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.4133986928104575,\n \"acc_stderr\": 0.019922115682786682,\n \ \ \"acc_norm\": 0.4133986928104575,\n \"acc_norm_stderr\": 0.019922115682786682\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5454545454545454,\n\ \ \"acc_stderr\": 0.04769300568972744,\n \"acc_norm\": 0.5454545454545454,\n\ \ \"acc_norm_stderr\": 0.04769300568972744\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.03136250240935893,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.03136250240935893\n },\n\ \ \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5572139303482587,\n\ \ \"acc_stderr\": 0.03512310964123937,\n \"acc_norm\": 0.5572139303482587,\n\ \ \"acc_norm_stderr\": 0.03512310964123937\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39156626506024095,\n\ \ \"acc_stderr\": 0.03799857454479636,\n \"acc_norm\": 0.39156626506024095,\n\ \ \"acc_norm_stderr\": 0.03799857454479636\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.6432748538011696,\n \"acc_stderr\": 0.03674013002860954,\n\ \ \"acc_norm\": 0.6432748538011696,\n \"acc_norm_stderr\": 0.03674013002860954\n\ \ 
},\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3084455324357405,\n\ \ \"mc1_stderr\": 0.01616803938315687,\n \"mc2\": 0.4524124423828304,\n\ \ \"mc2_stderr\": 0.01467368036859452\n }\n}\n```" repo_url: https://huggingface.co/tianyil1/denas-llama2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|arc:challenge|25_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hellaswag|10_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T16:03:41.588049.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T16:03:41.588049.parquet' 
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T16:03:41.588049.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T16:03:41.588049.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T16:03:41.588049.parquet' - config_name: 
harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - 
'**/details_harness|hendrycksTest-security_studies|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T16:03:41.588049.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_30T16_03_41.588049 path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T16:03:41.588049.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T16:03:41.588049.parquet' - config_name: results data_files: - split: 2023_08_30T16_03_41.588049 path: - results_2023-08-30T16:03:41.588049.parquet - split: latest path: - results_2023-08-30T16:03:41.588049.parquet --- # Dataset Card for Evaluation run of tianyil1/denas-llama2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/tianyil1/denas-llama2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [tianyil1/denas-llama2](https://huggingface.co/tianyil1/denas-llama2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_tianyil1__denas-llama2", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-30T16:03:41.588049](https://huggingface.co/datasets/open-llm-leaderboard/details_tianyil1__denas-llama2/blob/main/results_2023-08-30T16%3A03%3A41.588049.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.45795528138208735, "acc_stderr": 0.03527742372208128, "acc_norm": 0.4618604191106212, "acc_norm_stderr": 0.03526366614184863, "mc1": 0.3084455324357405, "mc1_stderr": 0.01616803938315687, "mc2": 0.4524124423828304, "mc2_stderr": 0.01467368036859452 }, "harness|arc:challenge|25": { "acc": 0.4991467576791809, "acc_stderr": 0.014611369529813272, "acc_norm": 0.5392491467576792, "acc_norm_stderr": 0.014566303676636584 }, "harness|hellaswag|10": { "acc": 0.5880302728540131, "acc_stderr": 0.004911837730582202, "acc_norm": 0.7783310097590121, "acc_norm_stderr": 0.004145206350032315 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4144736842105263, "acc_stderr": 0.04008973785779206, "acc_norm": 0.4144736842105263, "acc_norm_stderr": 0.04008973785779206 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.46037735849056605, "acc_stderr": 0.030676096599389184, "acc_norm": 0.46037735849056605, "acc_norm_stderr": 0.030676096599389184 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4236111111111111, "acc_stderr": 0.0413212501972337, "acc_norm": 0.4236111111111111, "acc_norm_stderr": 0.0413212501972337 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895537, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895537 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179963, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179963 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4340425531914894, "acc_stderr": 0.03240038086792747, "acc_norm": 0.4340425531914894, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.043727482902780064, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.043727482902780064 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.42758620689655175, "acc_stderr": 0.041227371113703316, "acc_norm": 0.42758620689655175, "acc_norm_stderr": 0.041227371113703316 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.291005291005291, "acc_stderr": 0.023393826500484865, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484865 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 0.03970158273235172, 
"acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.03970158273235172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4612903225806452, "acc_stderr": 0.02835863485983692, "acc_norm": 0.4612903225806452, "acc_norm_stderr": 0.02835863485983692 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3399014778325123, "acc_stderr": 0.033327690684107895, "acc_norm": 0.3399014778325123, "acc_norm_stderr": 0.033327690684107895 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6, "acc_stderr": 0.03825460278380025, "acc_norm": 0.6, "acc_norm_stderr": 0.03825460278380025 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5505050505050505, "acc_stderr": 0.035441324919479704, "acc_norm": 0.5505050505050505, "acc_norm_stderr": 0.035441324919479704 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6787564766839378, "acc_stderr": 0.033699508685490674, "acc_norm": 0.6787564766839378, "acc_norm_stderr": 0.033699508685490674 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4230769230769231, "acc_stderr": 0.025049197876042338, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.025049197876042338 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.37815126050420167, "acc_stderr": 0.03149930577784906, "acc_norm": 0.37815126050420167, "acc_norm_stderr": 0.03149930577784906 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943342, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943342 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6220183486238532, "acc_stderr": 0.02078918706672811, "acc_norm": 0.6220183486238532, "acc_norm_stderr": 0.02078918706672811 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.03054674526495318, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03054674526495318 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5392156862745098, "acc_stderr": 0.03498501649369527, "acc_norm": 0.5392156862745098, "acc_norm_stderr": 0.03498501649369527 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6033755274261603, "acc_stderr": 0.03184399873811225, "acc_norm": 0.6033755274261603, "acc_norm_stderr": 0.03184399873811225 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5426008968609866, "acc_stderr": 0.03343577705583065, "acc_norm": 0.5426008968609866, "acc_norm_stderr": 0.03343577705583065 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5190839694656488, "acc_stderr": 0.04382094705550988, "acc_norm": 0.5190839694656488, "acc_norm_stderr": 0.04382094705550988 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 
0.04830366024635331 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4662576687116564, "acc_stderr": 0.039194155450484096, "acc_norm": 0.4662576687116564, "acc_norm_stderr": 0.039194155450484096 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|hendrycksTest-management|5": { "acc": 0.5242718446601942, "acc_stderr": 0.049449010929737795, "acc_norm": 0.5242718446601942, "acc_norm_stderr": 0.049449010929737795 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7094017094017094, "acc_stderr": 0.029745048572674085, "acc_norm": 0.7094017094017094, "acc_norm_stderr": 0.029745048572674085 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6168582375478927, "acc_stderr": 0.017384774194885627, "acc_norm": 0.6168582375478927, "acc_norm_stderr": 0.017384774194885627 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.48554913294797686, "acc_stderr": 0.02690784985628254, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.02690784985628254 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4542483660130719, "acc_stderr": 0.02850980780262656, "acc_norm": 0.4542483660130719, "acc_norm_stderr": 0.02850980780262656 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6045016077170418, "acc_stderr": 0.027770918531427838, "acc_norm": 0.6045016077170418, "acc_norm_stderr": 0.027770918531427838 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5154320987654321, "acc_stderr": 0.0278074900442762, "acc_norm": 0.5154320987654321, "acc_norm_stderr": 0.0278074900442762 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.34397163120567376, "acc_stderr": 0.028338017428611327, "acc_norm": 0.34397163120567376, "acc_norm_stderr": 0.028338017428611327 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3741851368970013, "acc_stderr": 0.012359335618172056, "acc_norm": 0.3741851368970013, "acc_norm_stderr": 0.012359335618172056 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5183823529411765, "acc_stderr": 0.030352303395351964, "acc_norm": 0.5183823529411765, "acc_norm_stderr": 0.030352303395351964 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4133986928104575, "acc_stderr": 0.019922115682786682, "acc_norm": 0.4133986928104575, "acc_norm_stderr": 0.019922115682786682 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5454545454545454, "acc_stderr": 0.04769300568972744, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.04769300568972744 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4, "acc_stderr": 0.03136250240935893, "acc_norm": 0.4, "acc_norm_stderr": 0.03136250240935893 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5572139303482587, "acc_stderr": 0.03512310964123937, "acc_norm": 0.5572139303482587, "acc_norm_stderr": 0.03512310964123937 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479636, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 
0.03799857454479636 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6432748538011696, "acc_stderr": 0.03674013002860954, "acc_norm": 0.6432748538011696, "acc_norm_stderr": 0.03674013002860954 }, "harness|truthfulqa:mc|0": { "mc1": 0.3084455324357405, "mc1_stderr": 0.01616803938315687, "mc2": 0.4524124423828304, "mc2_stderr": 0.01467368036859452 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_DataLinguistic__DataLinguistic-34B-V1.0
open-llm-leaderboard
2023-08-30T18:13:15Z
201
0
[ "region:us" ]
null
2023-08-30T18:12:15Z
--- pretty_name: Evaluation run of DataLinguistic/DataLinguistic-34B-V1.0 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [DataLinguistic/DataLinguistic-34B-V1.0](https://huggingface.co/DataLinguistic/DataLinguistic-34B-V1.0)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DataLinguistic__DataLinguistic-34B-V1.0\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-30T18:11:50.917227](https://huggingface.co/datasets/open-llm-leaderboard/details_DataLinguistic__DataLinguistic-34B-V1.0/blob/main/results_2023-08-30T18%3A11%3A50.917227.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23205864259563588,\n\ \ \"acc_stderr\": 0.030715936843036345,\n \"acc_norm\": 0.2336046598801071,\n\ \ \"acc_norm_stderr\": 0.030731498766140584,\n \"mc1\": 0.2460220318237454,\n\ \ \"mc1_stderr\": 0.015077219200662578,\n \"mc2\": 0.48734329486879213,\n\ \ \"mc2_stderr\": 0.01631194781446388\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.23293515358361774,\n \"acc_stderr\": 0.012352507042617396,\n\ \ \"acc_norm\": 0.2764505119453925,\n \"acc_norm_stderr\": 0.013069662474252428\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2819159529974109,\n\ \ \"acc_stderr\": 0.004490130691020429,\n \"acc_norm\": 0.3296156144194384,\n\ \ \"acc_norm_stderr\": 0.004691128722535484\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n\ \ \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n\ \ \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\ \ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n\ \ \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n\ \ \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\ \ \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 
0.2569444444444444,\n\ \ \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \ \ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n\ \ \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \ \ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n\ \ \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"\ acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"\ acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"\ acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"\ acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 
0.027178752639044915\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n\ \ \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n\ \ \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \ \ \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n\ \ \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"\ acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"\ acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"\ acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n\ \ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n\ \ \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n\ \ \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n\ \ \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\ acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\ \ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n\ \ \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\ \ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\ \ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n\ \ 
\"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n\ \ \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n\ \ \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n\ \ \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n\ \ \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\ \ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\ \ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\ \ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\ \ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n\ \ \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \ \ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\ \ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\ \ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n\ \ \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n\ \ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n\ \ \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n\ \ \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\"\ : {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\ \ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\ \ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n\ \ \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n\ \ \"acc_stderr\": 
0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n\ \ \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.2460220318237454,\n \"mc1_stderr\": 0.015077219200662578,\n\ \ \"mc2\": 0.48734329486879213,\n \"mc2_stderr\": 0.01631194781446388\n\ \ }\n}\n```" repo_url: https://huggingface.co/DataLinguistic/DataLinguistic-34B-V1.0 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|arc:challenge|25_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hellaswag|10_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:11:50.917227.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:11:50.917227.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:11:50.917227.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:11:50.917227.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T18:11:50.917227.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T18:11:50.917227.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_30T18_11_50.917227 path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T18:11:50.917227.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T18:11:50.917227.parquet' - config_name: results data_files: - split: 2023_08_30T18_11_50.917227 path: - results_2023-08-30T18:11:50.917227.parquet - split: latest path: - results_2023-08-30T18:11:50.917227.parquet --- # Dataset Card for Evaluation run of DataLinguistic/DataLinguistic-34B-V1.0 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/DataLinguistic/DataLinguistic-34B-V1.0 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [DataLinguistic/DataLinguistic-34B-V1.0](https://huggingface.co/DataLinguistic/DataLinguistic-34B-V1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
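Since every evaluated task is exposed as its own configuration, it can also be convenient to enumerate the configurations and to pull the aggregated "results" configuration directly. The snippet below is a minimal sketch, assuming only that the `datasets` library is installed; the configuration and split names are taken from the YAML above, and the exact columns of the "results" rows are not documented here, so the final print is only for inspection.

```python
from datasets import get_dataset_config_names, load_dataset

repo_id = "open-llm-leaderboard/details_DataLinguistic__DataLinguistic-34B-V1.0"

# Enumerate the available configurations (one per evaluated task, plus the aggregated "results").
configs = get_dataset_config_names(repo_id)
print(len(configs), configs[:5])

# Load the aggregated metrics; the "latest" split points to the most recent run.
results = load_dataset(repo_id, "results", split="latest")
print(results[0])
```

Per-task details can be loaded in the same way, as shown in the snippet that follows.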
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_DataLinguistic__DataLinguistic-34B-V1.0", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-30T18:11:50.917227](https://huggingface.co/datasets/open-llm-leaderboard/details_DataLinguistic__DataLinguistic-34B-V1.0/blob/main/results_2023-08-30T18%3A11%3A50.917227.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23205864259563588, "acc_stderr": 0.030715936843036345, "acc_norm": 0.2336046598801071, "acc_norm_stderr": 0.030731498766140584, "mc1": 0.2460220318237454, "mc1_stderr": 0.015077219200662578, "mc2": 0.48734329486879213, "mc2_stderr": 0.01631194781446388 }, "harness|arc:challenge|25": { "acc": 0.23293515358361774, "acc_stderr": 0.012352507042617396, "acc_norm": 0.2764505119453925, "acc_norm_stderr": 0.013069662474252428 }, "harness|hellaswag|10": { "acc": 0.2819159529974109, "acc_stderr": 0.004490130691020429, "acc_norm": 0.3296156144194384, "acc_norm_stderr": 0.004691128722535484 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 
0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2460220318237454, "mc1_stderr": 0.015077219200662578, "mc2": 0.48734329486879213, "mc2_stderr": 0.01631194781446388 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7146998643875122, -0.8547424674034119, 0.2547878324985504, 0.2668590843677521, -0.1559358835220337, -0.04772788658738136, -0.041393958032131195, -0.24175946414470673, 0.5750306844711304, -0.05265902355313301, -0.484686017036438, -0.6871904134750366, -0.4118744134902954, 0.2514079511165619, -0.012106555514037609, 0.8029581308364868, -0.1653459072113037, -0.1190347671508789, 0.09611766040325165, -0.03432132303714752, -0.25722381472587585, -0.35886499285697937, -0.5026556849479675, -0.37955784797668457, 0.17516116797924042, 0.4498414993286133, 0.4350932836532593, 0.8210429549217224, 0.640610933303833, 0.2949702739715576, -0.3460322916507721, -0.07081297039985657, -0.15596725046634674, -0.2979687452316284, 0.37554994225502014, -0.35649701952934265, -0.8150062561035156, 0.32073816657066345, 0.7723057270050049, 0.6835547685623169, -0.11806538701057434, 0.2518908977508545, 0.019054295495152473, 0.5879044532775879, -0.31055504083633423, 0.06303095072507858, -0.2889617681503296, 0.20782490074634552, -0.20227308571338654, -0.2619255781173706, -0.3172600567340851, -0.24496807157993317, -0.1402759850025177, -0.8781082630157471, 0.24455483257770538, 0.3080526292324066, 1.558144450187683, -0.15901175141334534, -0.2028605341911316, 0.0903894454240799, -0.10180366784334183, 1.018262267112732, -0.8750062584877014, 0.4321621358394623, 0.8096951246261597, 0.0901145488023758, -0.16130535304546356, -0.5782309174537659, -0.658360481262207, 0.041584476828575134, -0.34401416778564453, 0.3587140440940857, -0.058151111006736755, -0.15486818552017212, 0.39452385902404785, 0.6770293712615967, -0.6937735676765442, 0.20785324275493622, -0.6787216663360596, -0.16968700289726257, 1.1073063611984253, 0.29173246026039124, 0.09543677419424057, -0.3629467189311981, -0.6517671942710876, -0.6434400081634521, -0.4051784574985504, 0.2286190539598465, 0.4621272087097168, 0.38797202706336975, -0.4219692051410675, 0.6895256042480469, -0.38753876090049744, 0.5737402439117432, 0.4101475775241852, -0.0001909359998535365, 0.9066474437713623, -0.645466148853302, -0.5048028230667114, -0.05369015038013458, 1.1248323917388916, 0.5585088133811951, 0.12734836339950562, 0.18376213312149048, 0.027353135868906975, -0.10481305420398712, 0.013123744167387486, -0.8194938898086548, -0.29526329040527344, 0.20186656713485718, -0.37436044216156006, -0.49157699942588806, 0.2886194586753845, -0.8728653192520142, 0.12958981096744537, -0.08054739236831665, 0.36697816848754883, -0.5336000919342041, -0.15769362449645996, 0.2722472846508026, -0.4020884335041046, 0.8038454651832581, -0.20866094529628754, -0.8225997090339661, 0.40078723430633545, 0.5044189095497131, 0.748826801776886, -0.10467717796564102, -0.45587241649627686, -0.07702116668224335, -0.13016758859157562, -0.28024980425834656, 0.5098975300788879, -0.2891542613506317, -0.43563178181648254, -0.268995463848114, 0.2923775613307953, -0.30303075909614563, -0.3792818784713745, 0.7549902200698853, -0.20628638565540314, 0.2537197470664978, -0.3990266025066376, -0.669622540473938, 0.1396019011735916, 0.35799703001976013, -0.4418014585971832, 1.3145776987075806, 0.24995413422584534, -0.8033359050750732, 0.4208900034427643, -0.6025422811508179, -0.21346816420555115, 0.0051576849073171616, -0.055888038128614426, -0.7650766372680664, -0.27915167808532715, 0.1633526086807251, 0.4060482084751129, -0.11536164581775665, -0.1063241958618164, -0.36700502038002014, -0.3620266914367676, 0.3369852304458618, -0.17459245026111603, 1.2637944221496582, -0.03998688980937004, -0.7687594294548035, -0.1722695231437683, 
-1.263710856437683, 0.317876398563385, 0.20590579509735107, -0.40786290168762207, -0.175625741481781, -0.46459004282951355, 0.007474096026271582, 0.18290238082408905, 0.2674160301685333, -0.8298342823982239, 0.28837111592292786, -0.39900603890419006, 0.1640569269657135, 1.222536325454712, -0.0015889342175796628, 0.16634488105773926, -0.5548059940338135, 0.5268086791038513, 0.19476979970932007, 0.13915205001831055, 0.38952797651290894, -0.5977736711502075, -0.8036437630653381, -0.5190826654434204, -0.04164930805563927, 0.6441206932067871, -0.21257144212722778, 1.140773057937622, 0.05315761640667915, -0.8948643803596497, -0.4913589358329773, -0.13717526197433472, 0.4874468445777893, 0.7495054602622986, 0.6020483374595642, -0.004439013544470072, -0.6327579617500305, -1.0862854719161987, -0.3318660855293274, -0.2244422733783722, 0.12650957703590393, 0.22251935303211212, 0.9675824642181396, -0.25219854712486267, 0.582189679145813, -0.985219419002533, -0.18620751798152924, 0.15019381046295166, -0.05960538610816002, 0.7946085333824158, 0.7572388052940369, 0.5743475556373596, -0.6861990690231323, -0.5185703039169312, 0.21301554143428802, -0.872478723526001, -0.10940100997686386, 0.09959148615598679, -0.25060099363327026, 0.16455703973770142, 0.16071559488773346, -0.7066042423248291, 0.497761070728302, 0.2195800244808197, -1.0400282144546509, 1.0660057067871094, -0.3099210858345032, 0.5433042645454407, -0.9948322772979736, 0.18740196526050568, -0.06419522315263748, 0.04812389239668846, -0.5056335926055908, 0.012087445706129074, 0.09446893632411957, 0.4261253774166107, -0.4897543787956238, 0.8185256719589233, -0.7081977128982544, -0.024456828832626343, 0.4588056802749634, 0.16648630797863007, -0.1375623196363449, 0.3614903390407562, -0.16347749531269073, 0.8059602975845337, 0.7857491374015808, -0.49740567803382874, 0.5317255854606628, 0.4276818633079529, -0.21355648338794708, 0.734188437461853, -0.4854258894920349, -0.28372326493263245, 0.2980056405067444, -0.07259150594472885, -0.8280810713768005, -0.46555769443511963, 0.010568157769739628, -0.623642086982727, -0.05659586563706398, 0.38510289788246155, -0.2634253203868866, -0.8283841013908386, -0.8998416066169739, 0.30635783076286316, 0.6972674131393433, -0.4388255774974823, -0.1852116733789444, 0.08445952832698822, 0.05202286317944527, -0.8079580068588257, -0.8773640990257263, -0.4509172737598419, -0.2355111688375473, -0.7180855870246887, 0.32037705183029175, -0.27790504693984985, -0.28669893741607666, -0.1328035295009613, -0.21933875977993011, -0.33366942405700684, 0.014852841384708881, 0.08471741527318954, 0.6440268158912659, -0.42488229274749756, -0.27586090564727783, -0.2583737075328827, -0.17035479843616486, 0.2343171089887619, -0.1294603794813156, 0.3416609466075897, -0.4678972661495209, -0.39409562945365906, -0.42528945207595825, -0.015303048305213451, 0.7423458695411682, -0.07148101925849915, 0.7194913625717163, 0.4240801930427551, -0.32108092308044434, 0.00598978903144598, -0.33327212929725647, -0.2757313847541809, -0.5875294208526611, 0.2836179733276367, -0.5039138793945312, -1.0635945796966553, 0.7917153239250183, 0.5316590666770935, 0.048317909240722656, 1.085378885269165, 0.6008082032203674, -0.3034014105796814, 0.9845536947250366, 0.032336555421352386, 0.2865150570869446, 0.40440887212753296, -0.6382817625999451, 0.13955968618392944, -0.9058994054794312, -0.31277531385421753, -0.5809223651885986, -0.4570513963699341, -0.7612072229385376, -0.06929195672273636, 0.2348174899816513, 0.15278106927871704, -0.6169586777687073, 
0.6120949387550354, -0.8292104601860046, 0.5960484147071838, 0.559215247631073, 0.23389773070812225, 0.1578960120677948, -0.14448387920856476, -0.4125744104385376, -0.13684405386447906, -0.48277801275253296, -0.2771323621273041, 1.2007938623428345, 0.2886941134929657, 0.7276943922042847, 0.07605063915252686, 0.8781083822250366, 0.06830477714538574, -0.10001185536384583, -0.6024466753005981, 0.6476678848266602, 0.107357457280159, -0.8013083934783936, -0.44244515895843506, -0.49766165018081665, -1.1072094440460205, 0.3715275824069977, -0.10407757759094238, -0.9102634787559509, 0.11220071464776993, 0.006349407602101564, -0.16546721756458282, 0.46237725019454956, -0.5618108510971069, 0.8495596051216125, -0.13960854709148407, -0.4777137339115143, 0.08699530363082886, -0.8576731085777283, 0.44208526611328125, 0.19564032554626465, 0.25909364223480225, 0.060184333473443985, 0.2825165092945099, 1.194793462753296, -0.7777550220489502, 0.42023587226867676, 0.09012886136770248, 0.009772256016731262, 0.3121507167816162, -0.18204551935195923, 0.4714325964450836, 0.060273103415966034, -0.055362071841955185, -0.08669792860746384, 0.30351772904396057, -0.8462476134300232, -0.07075724005699158, 0.9282471537590027, -0.9251366853713989, -0.5880477428436279, -0.8664610385894775, -0.5234363675117493, 0.05937325581908226, 0.5728468894958496, 0.3770377039909363, 0.5295664668083191, -0.03550456464290619, 0.448307603597641, 0.8621978759765625, -0.14414484798908234, 0.6073524951934814, 0.29116037487983704, 0.09040415287017822, -0.6547368764877319, 0.8780344128608704, 0.09509962797164917, 0.3619987666606903, 0.2783498167991638, 0.3864820897579193, -0.5483793020248413, -0.19590795040130615, -0.21521008014678955, 0.5280094742774963, -0.6115257143974304, -0.2102026641368866, -0.3580181300640106, -0.37801122665405273, -0.7793753147125244, -0.6220853328704834, -0.3275771141052246, -0.5395370721817017, -0.4604317247867584, -0.5100017786026001, 0.5853477716445923, 0.4834236800670624, -0.41462355852127075, 0.03886093571782112, -0.49469250440597534, 0.26440417766571045, 0.3116810619831085, 0.5690656304359436, -0.4259049892425537, -0.5565037727355957, 0.028341544792056084, -0.12274803221225739, -0.5478357672691345, -0.9549481272697449, 0.3170059621334076, -0.01573464274406433, 0.538619339466095, 0.579123318195343, 0.05790948495268822, 0.8261107802391052, -0.2337307184934616, 1.062909483909607, 0.31858938932418823, -0.806311309337616, 0.7252964973449707, -0.3484640121459961, 0.18285724520683289, 0.6680840253829956, 0.18752513825893402, -0.2558366060256958, -0.6936919093132019, -1.3295592069625854, -0.7953081130981445, 0.6676134467124939, 0.4146154820919037, -0.23598046600818634, 0.003461311338469386, 0.13838565349578857, -0.3110519051551819, -0.18851882219314575, -0.6682368516921997, -0.8794065117835999, -0.1300434172153473, -0.5148606896400452, 0.1080290675163269, 0.06468652933835983, -0.37127527594566345, -0.8448494672775269, 0.944769561290741, 0.0026225291658192873, 0.6074882745742798, 0.4542771875858307, 0.07193924486637115, 0.08924467861652374, 0.50432950258255, 0.947761058807373, 0.7085126638412476, -0.4141826927661896, 0.40531107783317566, 0.4160318970680237, -1.0786612033843994, 0.4627417325973511, 0.3602668344974518, -0.051154185086488724, 0.007374533452093601, 0.4729461967945099, 0.45707401633262634, 0.051160722970962524, -0.22789986431598663, 0.6444537043571472, -0.017430773004889488, -0.5490077137947083, -0.4017389118671417, 0.0616612583398819, -0.09417535364627838, -0.03328438475728035, 0.3720219135284424, 
-0.20401214063167572, -0.055576059967279434, -0.4782104194164276, 0.4864220917224884, 0.34868350625038147, -0.4837898910045624, -0.17544761300086975, 0.7238256931304932, -0.15859343111515045, -0.18241600692272186, 0.36374157667160034, -0.17325764894485474, -0.6561170816421509, 1.1078460216522217, 0.6069440245628357, 0.702055037021637, -0.29676491022109985, -0.05914861336350441, 0.9161860346794128, 0.4157925844192505, -0.02868594042956829, 0.5502037405967712, 0.295953631401062, -0.2600177228450775, 0.16746874153614044, -0.850269615650177, -0.05103360861539841, 0.18338309228420258, -0.8297517895698547, 0.34787991642951965, -0.4940027594566345, -0.16517043113708496, 0.009299300611019135, 0.38036131858825684, -0.45128437876701355, 0.5911792516708374, -0.3782902956008911, 1.2379155158996582, -1.0023123025894165, 0.7031135559082031, 0.7671010494232178, -0.5607459545135498, -1.0516546964645386, -0.5111095309257507, 0.01480475626885891, -0.798431932926178, 0.562657356262207, -0.026493404060602188, 0.18735700845718384, -0.04955301061272621, -0.711049497127533, -0.8988313674926758, 1.3553290367126465, -0.023430651053786278, -0.4564698338508606, 0.2452974170446396, -0.027097636833786964, 0.4658360779285431, 0.16143952310085297, 0.594556450843811, 0.7820991277694702, 0.8251543641090393, -0.11368899792432785, -0.8226805329322815, 0.3228769600391388, -0.5276000499725342, -0.34174203872680664, 0.41903892159461975, -0.9122679233551025, 1.1471741199493408, 0.0020112795755267143, 0.17667627334594727, -0.17750662565231323, 0.6667782068252563, 0.7878801822662354, 0.27156829833984375, 0.3645865321159363, 0.9117097854614258, 0.9277324080467224, -0.4905848205089569, 1.0030022859573364, -0.2534089684486389, 0.8516612648963928, 0.7106579542160034, 0.22099679708480835, 0.7737575173377991, 0.6948150992393494, -0.5816203355789185, 0.5411330461502075, 0.7945644855499268, -0.27345219254493713, 0.3655392527580261, 0.3035276532173157, -0.13634559512138367, -0.12852418422698975, 0.4216564893722534, -0.8706517219543457, 0.1391376554965973, 0.08908963203430176, -0.327354371547699, 0.1247606948018074, -0.4086531698703766, 0.3318615257740021, -0.06039588898420334, 0.0032967596780508757, 0.4071308672428131, 0.05371660739183426, -0.4578055739402771, 0.9775959253311157, -0.16692698001861572, 0.7598676681518555, -0.5324798226356506, -0.059529900550842285, -0.36790961027145386, 0.5875397324562073, -0.45912691950798035, -1.0812252759933472, 0.1702498346567154, 0.0532715767621994, -0.12369082868099213, -0.2166518270969391, 0.683742344379425, -0.2682678997516632, -0.7500821948051453, 0.15308281779289246, 0.055599063634872437, 0.1421448290348053, 0.5111870765686035, -0.664152979850769, -0.33699163794517517, -0.04914272949099541, -0.5527068972587585, 0.0827561467885971, 0.3131883144378662, 0.29158100485801697, 0.5547798871994019, 0.6157701015472412, 0.1701536476612091, 0.409408837556839, -0.5052598118782043, 0.7925301194190979, -1.0312371253967285, -0.7439214587211609, -0.8909785747528076, 0.4723559617996216, -0.32793378829956055, -0.8582984805107117, 0.9888305068016052, 1.052549123764038, 0.9158389568328857, -0.004570669028908014, 0.6861071586608887, -0.3953111171722412, 0.28751423954963684, -0.3915780186653137, 0.9610953330993652, -0.838130533695221, -0.22584199905395508, -0.2825477123260498, -0.7053803205490112, -0.37996768951416016, 0.8816713094711304, -0.18016256392002106, 0.041424330323934555, 1.0874311923980713, 0.6860418915748596, -0.10901737958192825, 0.08013854175806046, -0.04221499338746071, 0.5854603052139282, 
0.34788525104522705, 0.9503021240234375, 0.6172608733177185, -0.7871214747428894, 0.32490453124046326, -0.4981878399848938, -0.41942664980888367, -0.3491049110889435, -0.45899999141693115, -0.8646364212036133, -0.5172498822212219, -0.26586607098579407, -0.6266435980796814, -0.11902043223381042, 1.0036938190460205, 0.45928242802619934, -0.9267820715904236, -0.44213658571243286, -0.1348755806684494, 0.1513323038816452, -0.5768963098526001, -0.4212609827518463, 0.6803069710731506, -0.11399351805448532, -0.5646812319755554, 0.2557887136936188, -0.1539137065410614, 0.21701329946517944, 0.09181590378284454, -0.41146034002304077, -0.7421200275421143, 0.03240957856178284, 0.42049089074134827, 0.37400731444358826, -0.7039856314659119, -0.7062715888023376, 0.32538145780563354, -0.5433518290519714, 0.4273153841495514, -0.026220524683594704, -0.5107113122940063, 0.07876140624284744, 0.6770555973052979, 0.48487651348114014, 0.6701871752738953, -0.061881281435489655, 0.1272265762090683, -0.6865006685256958, 0.18926367163658142, 0.012111719697713852, 0.2655269503593445, 0.005155785940587521, -0.3182903826236725, 0.7769874930381775, 0.6944240927696228, -0.5297737717628479, -1.0599431991577148, -0.41636988520622253, -1.443749189376831, 0.015129637904465199, 1.1556198596954346, 0.011885883286595345, -0.46620896458625793, 0.21618227660655975, -0.15170393884181976, 0.19710251688957214, -0.3018732964992523, 0.7416709065437317, 0.8350528478622437, -0.34981483221054077, 0.12157167494297028, -0.622462272644043, 0.3806942105293274, 0.5629701018333435, -1.178108811378479, -0.06137818098068237, 0.24637402594089508, 0.30945447087287903, 0.36565497517585754, 0.6516697406768799, -0.10432802140712738, 0.26930302381515503, 0.22898030281066895, 0.05760419741272926, 0.03800797834992409, 0.061243437230587006, -0.26244381070137024, 0.023582415655255318, -0.2501358389854431, -0.4460894465446472 ]
open-llm-leaderboard/details_conceptofmind__Open-LLongMA-3b
open-llm-leaderboard
2023-08-30T22:00:44Z
201
0
[ "region:us" ]
null
2023-08-30T21:59:45Z
--- pretty_name: Evaluation run of conceptofmind/Open-LLongMA-3b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [conceptofmind/Open-LLongMA-3b](https://huggingface.co/conceptofmind/Open-LLongMA-3b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_conceptofmind__Open-LLongMA-3b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-30T21:59:22.661580](https://huggingface.co/datasets/open-llm-leaderboard/details_conceptofmind__Open-LLongMA-3b/blob/main/results_2023-08-30T21%3A59%3A22.661580.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2557876003278525,\n\ \ \"acc_stderr\": 0.03147445441526624,\n \"acc_norm\": 0.25883980680254576,\n\ \ \"acc_norm_stderr\": 0.031472248225636705,\n \"mc1\": 0.2252141982864137,\n\ \ \"mc1_stderr\": 0.014623240768023493,\n \"mc2\": 0.345076271513504,\n\ \ \"mc2_stderr\": 0.013239849784853331\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.38054607508532423,\n \"acc_stderr\": 0.014188277712349824,\n\ \ \"acc_norm\": 0.39761092150170646,\n \"acc_norm_stderr\": 0.014301752223279538\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.49153555068711413,\n\ \ \"acc_stderr\": 0.004989066355449554,\n \"acc_norm\": 0.6545508862776339,\n\ \ \"acc_norm_stderr\": 0.004745426656377574\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.035914440841969694,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.035914440841969694\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.19736842105263158,\n \"acc_stderr\": 0.03238981601699397,\n\ \ \"acc_norm\": 0.19736842105263158,\n \"acc_norm_stderr\": 0.03238981601699397\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.27,\n\ \ \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \ \ \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.27547169811320754,\n \"acc_stderr\": 0.027495663683724067,\n\ \ \"acc_norm\": 0.27547169811320754,\n \"acc_norm_stderr\": 0.027495663683724067\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2013888888888889,\n\ \ \"acc_stderr\": 0.033536474697138406,\n \"acc_norm\": 0.2013888888888889,\n\ \ \"acc_norm_stderr\": 
0.033536474697138406\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.19,\n \"acc_stderr\": 0.03942772444036622,\n \"acc_norm\"\ : 0.19,\n \"acc_norm_stderr\": 0.03942772444036622\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2023121387283237,\n\ \ \"acc_stderr\": 0.030631145539198823,\n \"acc_norm\": 0.2023121387283237,\n\ \ \"acc_norm_stderr\": 0.030631145539198823\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617749,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617749\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n\ \ \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.3276595744680851,\n \"acc_stderr\": 0.030683020843231008,\n\ \ \"acc_norm\": 0.3276595744680851,\n \"acc_norm_stderr\": 0.030683020843231008\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.0414243971948936,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.0414243971948936\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2206896551724138,\n \"acc_stderr\": 0.03455930201924812,\n\ \ \"acc_norm\": 0.2206896551724138,\n \"acc_norm_stderr\": 0.03455930201924812\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.26455026455026454,\n \"acc_stderr\": 0.02271746789770861,\n \"\ acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.02271746789770861\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.19047619047619047,\n\ \ \"acc_stderr\": 0.03512207412302053,\n \"acc_norm\": 0.19047619047619047,\n\ \ \"acc_norm_stderr\": 0.03512207412302053\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25806451612903225,\n\ \ \"acc_stderr\": 0.024892469172462846,\n \"acc_norm\": 0.25806451612903225,\n\ \ \"acc_norm_stderr\": 0.024892469172462846\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.270935960591133,\n \"acc_stderr\": 0.031270907132976984,\n\ \ \"acc_norm\": 0.270935960591133,\n \"acc_norm_stderr\": 0.031270907132976984\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\"\ : 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.24242424242424243,\n \"acc_stderr\": 0.03346409881055953,\n\ \ \"acc_norm\": 0.24242424242424243,\n \"acc_norm_stderr\": 0.03346409881055953\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.22727272727272727,\n \"acc_stderr\": 0.02985751567338641,\n \"\ acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.02985751567338641\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.21761658031088082,\n \"acc_stderr\": 0.029778663037752954,\n\ \ \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.029778663037752954\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.22564102564102564,\n \"acc_stderr\": 0.021193632525148533,\n\ \ \"acc_norm\": 0.22564102564102564,\n \"acc_norm_stderr\": 0.021193632525148533\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.24814814814814815,\n \"acc_stderr\": 0.026335739404055803,\n \ \ \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.026335739404055803\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.23109243697478993,\n \"acc_stderr\": 0.02738140692786897,\n\ \ \"acc_norm\": 0.23109243697478993,\n \"acc_norm_stderr\": 0.02738140692786897\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.25165562913907286,\n \"acc_stderr\": 0.035433042343899844,\n \"\ acc_norm\": 0.25165562913907286,\n \"acc_norm_stderr\": 0.035433042343899844\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.2,\n \"acc_stderr\": 0.01714985851425095,\n \"acc_norm\": 0.2,\n\ \ \"acc_norm_stderr\": 0.01714985851425095\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.19907407407407407,\n \"acc_stderr\": 0.02723229846269023,\n\ \ \"acc_norm\": 0.19907407407407407,\n \"acc_norm_stderr\": 0.02723229846269023\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.23039215686274508,\n \"acc_stderr\": 0.029554292605695066,\n \"\ acc_norm\": 0.23039215686274508,\n \"acc_norm_stderr\": 0.029554292605695066\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.27848101265822783,\n \"acc_stderr\": 0.029178682304842544,\n \ \ \"acc_norm\": 0.27848101265822783,\n \"acc_norm_stderr\": 0.029178682304842544\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3721973094170404,\n\ \ \"acc_stderr\": 0.03244305283008731,\n \"acc_norm\": 0.3721973094170404,\n\ \ \"acc_norm_stderr\": 0.03244305283008731\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n\ \ \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2231404958677686,\n \"acc_stderr\": 0.03800754475228733,\n \"\ acc_norm\": 0.2231404958677686,\n \"acc_norm_stderr\": 0.03800754475228733\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2962962962962963,\n\ \ \"acc_stderr\": 0.044143436668549335,\n \"acc_norm\": 0.2962962962962963,\n\ \ \"acc_norm_stderr\": 0.044143436668549335\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.20245398773006135,\n \"acc_stderr\": 0.03157065078911902,\n\ \ \"acc_norm\": 0.20245398773006135,\n \"acc_norm_stderr\": 0.03157065078911902\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n\ \ \"acc_stderr\": 0.04246624336697625,\n \"acc_norm\": 0.2767857142857143,\n\ \ \"acc_norm_stderr\": 0.04246624336697625\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.2524271844660194,\n \"acc_stderr\": 0.04301250399690877,\n\ \ \"acc_norm\": 0.2524271844660194,\n \"acc_norm_stderr\": 0.04301250399690877\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2777777777777778,\n\ \ \"acc_stderr\": 
0.02934311479809447,\n \"acc_norm\": 0.2777777777777778,\n\ \ \"acc_norm_stderr\": 0.02934311479809447\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.28991060025542786,\n\ \ \"acc_stderr\": 0.016225017944770957,\n \"acc_norm\": 0.28991060025542786,\n\ \ \"acc_norm_stderr\": 0.016225017944770957\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.25722543352601157,\n \"acc_stderr\": 0.023532925431044283,\n\ \ \"acc_norm\": 0.25722543352601157,\n \"acc_norm_stderr\": 0.023532925431044283\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.02463004897982476,\n\ \ \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.02463004897982476\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2797427652733119,\n\ \ \"acc_stderr\": 0.025494259350694905,\n \"acc_norm\": 0.2797427652733119,\n\ \ \"acc_norm_stderr\": 0.025494259350694905\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.26851851851851855,\n \"acc_stderr\": 0.024659685185967277,\n\ \ \"acc_norm\": 0.26851851851851855,\n \"acc_norm_stderr\": 0.024659685185967277\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2730496453900709,\n \"acc_stderr\": 0.026577860943307854,\n \ \ \"acc_norm\": 0.2730496453900709,\n \"acc_norm_stderr\": 0.026577860943307854\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2392438070404172,\n\ \ \"acc_stderr\": 0.010896123652676653,\n \"acc_norm\": 0.2392438070404172,\n\ \ \"acc_norm_stderr\": 0.010896123652676653\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.026303648393696036,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.026303648393696036\n \ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\"\ : 0.24509803921568626,\n \"acc_stderr\": 0.01740181671142766,\n \"\ acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.01740181671142766\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.33636363636363636,\n\ \ \"acc_stderr\": 0.04525393596302505,\n \"acc_norm\": 0.33636363636363636,\n\ \ \"acc_norm_stderr\": 0.04525393596302505\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.19591836734693877,\n \"acc_stderr\": 0.025409301953225678,\n\ \ \"acc_norm\": 0.19591836734693877,\n \"acc_norm_stderr\": 0.025409301953225678\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n\ \ \"acc_stderr\": 0.030360490154014645,\n \"acc_norm\": 0.24378109452736318,\n\ \ \"acc_norm_stderr\": 0.030360490154014645\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3192771084337349,\n\ \ \"acc_stderr\": 0.03629335329947861,\n \"acc_norm\": 0.3192771084337349,\n\ \ \"acc_norm_stderr\": 0.03629335329947861\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n\ \ 
\"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2252141982864137,\n\ \ \"mc1_stderr\": 0.014623240768023493,\n \"mc2\": 0.345076271513504,\n\ \ \"mc2_stderr\": 0.013239849784853331\n }\n}\n```" repo_url: https://huggingface.co/conceptofmind/Open-LLongMA-3b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|arc:challenge|25_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hellaswag|10_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T21:59:22.661580.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T21:59:22.661580.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T21:59:22.661580.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-30T21:59:22.661580.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T21:59:22.661580.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-30T21:59:22.661580.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_30T21_59_22.661580 path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T21:59:22.661580.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-30T21:59:22.661580.parquet' - config_name: results data_files: - split: 2023_08_30T21_59_22.661580 path: - results_2023-08-30T21:59:22.661580.parquet - split: latest path: - results_2023-08-30T21:59:22.661580.parquet --- # Dataset Card for Evaluation run of conceptofmind/Open-LLongMA-3b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/conceptofmind/Open-LLongMA-3b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [conceptofmind/Open-LLongMA-3b](https://huggingface.co/conceptofmind/Open-LLongMA-3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_conceptofmind__Open-LLongMA-3b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-30T21:59:22.661580](https://huggingface.co/datasets/open-llm-leaderboard/details_conceptofmind__Open-LLongMA-3b/blob/main/results_2023-08-30T21%3A59%3A22.661580.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2557876003278525, "acc_stderr": 0.03147445441526624, "acc_norm": 0.25883980680254576, "acc_norm_stderr": 0.031472248225636705, "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023493, "mc2": 0.345076271513504, "mc2_stderr": 0.013239849784853331 }, "harness|arc:challenge|25": { "acc": 0.38054607508532423, "acc_stderr": 0.014188277712349824, "acc_norm": 0.39761092150170646, "acc_norm_stderr": 0.014301752223279538 }, "harness|hellaswag|10": { "acc": 0.49153555068711413, "acc_stderr": 0.004989066355449554, "acc_norm": 0.6545508862776339, "acc_norm_stderr": 0.004745426656377574 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2222222222222222, "acc_stderr": 0.035914440841969694, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.035914440841969694 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.27547169811320754, "acc_stderr": 0.027495663683724067, "acc_norm": 0.27547169811320754, "acc_norm_stderr": 0.027495663683724067 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2013888888888889, "acc_stderr": 0.033536474697138406, "acc_norm": 0.2013888888888889, "acc_norm_stderr": 0.033536474697138406 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.19, "acc_stderr": 0.03942772444036622, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036622 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.030631145539198823, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.030631145539198823 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617749, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617749 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3276595744680851, "acc_stderr": 0.030683020843231008, "acc_norm": 0.3276595744680851, "acc_norm_stderr": 0.030683020843231008 }, "harness|hendrycksTest-econometrics|5": { "acc": 
0.2631578947368421, "acc_stderr": 0.0414243971948936, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2206896551724138, "acc_stderr": 0.03455930201924812, "acc_norm": 0.2206896551724138, "acc_norm_stderr": 0.03455930201924812 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.02271746789770861, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.02271746789770861 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.19047619047619047, "acc_stderr": 0.03512207412302053, "acc_norm": 0.19047619047619047, "acc_norm_stderr": 0.03512207412302053 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25806451612903225, "acc_stderr": 0.024892469172462846, "acc_norm": 0.25806451612903225, "acc_norm_stderr": 0.024892469172462846 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.270935960591133, "acc_stderr": 0.031270907132976984, "acc_norm": 0.270935960591133, "acc_norm_stderr": 0.031270907132976984 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.03346409881055953, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.22727272727272727, "acc_stderr": 0.02985751567338641, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.02985751567338641 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.029778663037752954, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.029778663037752954 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.22564102564102564, "acc_stderr": 0.021193632525148533, "acc_norm": 0.22564102564102564, "acc_norm_stderr": 0.021193632525148533 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.026335739404055803, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.026335739404055803 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23109243697478993, "acc_stderr": 0.02738140692786897, "acc_norm": 0.23109243697478993, "acc_norm_stderr": 0.02738140692786897 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.25165562913907286, "acc_stderr": 0.035433042343899844, "acc_norm": 0.25165562913907286, "acc_norm_stderr": 0.035433042343899844 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.2, "acc_stderr": 0.01714985851425095, "acc_norm": 0.2, "acc_norm_stderr": 0.01714985851425095 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.19907407407407407, "acc_stderr": 0.02723229846269023, "acc_norm": 0.19907407407407407, "acc_norm_stderr": 0.02723229846269023 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23039215686274508, "acc_stderr": 0.029554292605695066, "acc_norm": 0.23039215686274508, "acc_norm_stderr": 0.029554292605695066 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.27848101265822783, "acc_stderr": 0.029178682304842544, "acc_norm": 0.27848101265822783, "acc_norm_stderr": 0.029178682304842544 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3721973094170404, 
"acc_stderr": 0.03244305283008731, "acc_norm": 0.3721973094170404, "acc_norm_stderr": 0.03244305283008731 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2231404958677686, "acc_stderr": 0.03800754475228733, "acc_norm": 0.2231404958677686, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.044143436668549335, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.044143436668549335 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.20245398773006135, "acc_stderr": 0.03157065078911902, "acc_norm": 0.20245398773006135, "acc_norm_stderr": 0.03157065078911902 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697625, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697625 }, "harness|hendrycksTest-management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690877, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690877 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02934311479809447, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02934311479809447 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.28991060025542786, "acc_stderr": 0.016225017944770957, "acc_norm": 0.28991060025542786, "acc_norm_stderr": 0.016225017944770957 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.25722543352601157, "acc_stderr": 0.023532925431044283, "acc_norm": 0.25722543352601157, "acc_norm_stderr": 0.023532925431044283 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24509803921568626, "acc_stderr": 0.02463004897982476, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.02463004897982476 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2797427652733119, "acc_stderr": 0.025494259350694905, "acc_norm": 0.2797427652733119, "acc_norm_stderr": 0.025494259350694905 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.26851851851851855, "acc_stderr": 0.024659685185967277, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.024659685185967277 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2730496453900709, "acc_stderr": 0.026577860943307854, "acc_norm": 0.2730496453900709, "acc_norm_stderr": 0.026577860943307854 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2392438070404172, "acc_stderr": 0.010896123652676653, "acc_norm": 0.2392438070404172, "acc_norm_stderr": 0.010896123652676653 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.25, "acc_stderr": 0.026303648393696036, "acc_norm": 0.25, "acc_norm_stderr": 0.026303648393696036 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24509803921568626, "acc_stderr": 0.01740181671142766, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.01740181671142766 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.33636363636363636, "acc_stderr": 0.04525393596302505, "acc_norm": 0.33636363636363636, "acc_norm_stderr": 0.04525393596302505 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.19591836734693877, "acc_stderr": 0.025409301953225678, "acc_norm": 0.19591836734693877, "acc_norm_stderr": 0.025409301953225678 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014645, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014645 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-virology|5": { "acc": 0.3192771084337349, "acc_stderr": 0.03629335329947861, "acc_norm": 0.3192771084337349, "acc_norm_stderr": 0.03629335329947861 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023493, "mc2": 0.345076271513504, "mc2_stderr": 0.013239849784853331 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
embedding: [vector of floats omitted]
datasetId: open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B
author: open-llm-leaderboard
last_modified: 2023-08-31T13:30:09Z
downloads: 201
likes: 0
tags: [ "region:us" ]
task_categories: null
createdAt: 2023-08-31T13:29:07Z
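The repository fields above (author, timestamps, downloads, likes, tags) are the same ones the Hugging Face Hub API reports for a dataset repo. A small sketch of fetching them directly, assuming the `huggingface_hub` client is available:

```python
from huggingface_hub import HfApi

api = HfApi()
# Look up the dataset repository listed above.
info = api.dataset_info("open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B")

print(info.downloads, info.likes)  # download and like counters
print(info.tags)                   # e.g. ['region:us']
print(info.last_modified)          # timestamp of the last push
```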
--- pretty_name: Evaluation run of xzuyn/LLaMa-2-PeanutButter_v14-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [xzuyn/LLaMa-2-PeanutButter_v14-7B](https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-31T13:28:42.641649](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B/blob/main/results_2023-08-31T13%3A28%3A42.641649.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.46314650559918413,\n\ \ \"acc_stderr\": 0.0353597619312551,\n \"acc_norm\": 0.4669718546477287,\n\ \ \"acc_norm_stderr\": 0.03534376319528717,\n \"mc1\": 0.27906976744186046,\n\ \ \"mc1_stderr\": 0.0157021070906279,\n \"mc2\": 0.44677492914800465,\n\ \ \"mc2_stderr\": 0.015984529713376692\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5051194539249146,\n \"acc_stderr\": 0.014610624890309157,\n\ \ \"acc_norm\": 0.5418088737201365,\n \"acc_norm_stderr\": 0.014560220308714697\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6148177653853814,\n\ \ \"acc_stderr\": 0.004856437955719853,\n \"acc_norm\": 0.803823939454292,\n\ \ \"acc_norm_stderr\": 0.003962917115206181\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621502,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621502\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n\ \ \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n\ \ \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.4407894736842105,\n \"acc_stderr\": 0.040403110624904356,\n\ \ \"acc_norm\": 0.4407894736842105,\n \"acc_norm_stderr\": 0.040403110624904356\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.48,\n\ \ \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \ \ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.4867924528301887,\n \"acc_stderr\": 0.030762134874500476,\n\ \ \"acc_norm\": 0.4867924528301887,\n \"acc_norm_stderr\": 0.030762134874500476\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4861111111111111,\n\ \ \"acc_stderr\": 0.04179596617581002,\n \"acc_norm\": 0.4861111111111111,\n\ \ \"acc_norm_stderr\": 
0.04179596617581002\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n\ \ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.41040462427745666,\n\ \ \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.41040462427745666,\n\ \ \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179963,\n\ \ \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179963\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n\ \ \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n\ \ \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.32456140350877194,\n\ \ \"acc_stderr\": 0.04404556157374767,\n \"acc_norm\": 0.32456140350877194,\n\ \ \"acc_norm_stderr\": 0.04404556157374767\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4413793103448276,\n \"acc_stderr\": 0.04137931034482758,\n\ \ \"acc_norm\": 0.4413793103448276,\n \"acc_norm_stderr\": 0.04137931034482758\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3148148148148148,\n \"acc_stderr\": 0.02391998416404773,\n \"\ acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02391998416404773\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n\ \ \"acc_stderr\": 0.03809523809523811,\n \"acc_norm\": 0.23809523809523808,\n\ \ \"acc_norm_stderr\": 0.03809523809523811\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4935483870967742,\n\ \ \"acc_stderr\": 0.02844163823354051,\n \"acc_norm\": 0.4935483870967742,\n\ \ \"acc_norm_stderr\": 0.02844163823354051\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.3891625615763547,\n \"acc_stderr\": 0.03430462416103872,\n\ \ \"acc_norm\": 0.3891625615763547,\n \"acc_norm_stderr\": 0.03430462416103872\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\"\ : 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.5393939393939394,\n \"acc_stderr\": 0.03892207016552013,\n\ \ \"acc_norm\": 0.5393939393939394,\n \"acc_norm_stderr\": 0.03892207016552013\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.5303030303030303,\n \"acc_stderr\": 0.03555804051763929,\n \"\ acc_norm\": 0.5303030303030303,\n \"acc_norm_stderr\": 0.03555804051763929\n\ \ },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.6528497409326425,\n \"acc_stderr\": 0.03435696168361355,\n\ \ \"acc_norm\": 0.6528497409326425,\n \"acc_norm_stderr\": 0.03435696168361355\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.43333333333333335,\n \"acc_stderr\": 0.025124653525885124,\n\ \ \"acc_norm\": 0.43333333333333335,\n \"acc_norm_stderr\": 0.025124653525885124\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.29259259259259257,\n \"acc_stderr\": 0.027738969632176088,\n \ \ \"acc_norm\": 0.29259259259259257,\n \"acc_norm_stderr\": 0.027738969632176088\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.4327731092436975,\n \"acc_stderr\": 0.03218358107742613,\n \ \ \"acc_norm\": 0.4327731092436975,\n \"acc_norm_stderr\": 0.03218358107742613\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.03757949922943342,\n \"acc_norm\"\ : 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943342\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6238532110091743,\n\ \ \"acc_stderr\": 0.020769231968205085,\n \"acc_norm\": 0.6238532110091743,\n\ \ \"acc_norm_stderr\": 0.020769231968205085\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.27314814814814814,\n \"acc_stderr\": 0.030388051301678116,\n\ \ \"acc_norm\": 0.27314814814814814,\n \"acc_norm_stderr\": 0.030388051301678116\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.5686274509803921,\n \"acc_stderr\": 0.03476099060501636,\n \"\ acc_norm\": 0.5686274509803921,\n \"acc_norm_stderr\": 0.03476099060501636\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.5907172995780591,\n \"acc_stderr\": 0.032007041833595914,\n \ \ \"acc_norm\": 0.5907172995780591,\n \"acc_norm_stderr\": 0.032007041833595914\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.48878923766816146,\n\ \ \"acc_stderr\": 0.033549366530984746,\n \"acc_norm\": 0.48878923766816146,\n\ \ \"acc_norm_stderr\": 0.033549366530984746\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5114503816793893,\n \"acc_stderr\": 0.043841400240780176,\n\ \ \"acc_norm\": 0.5114503816793893,\n \"acc_norm_stderr\": 0.043841400240780176\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.5950413223140496,\n \"acc_stderr\": 0.04481137755942469,\n \"\ acc_norm\": 0.5950413223140496,\n \"acc_norm_stderr\": 0.04481137755942469\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4722222222222222,\n\ \ \"acc_stderr\": 0.04826217294139894,\n \"acc_norm\": 0.4722222222222222,\n\ \ \"acc_norm_stderr\": 0.04826217294139894\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5030674846625767,\n \"acc_stderr\": 0.03928297078179663,\n\ \ \"acc_norm\": 0.5030674846625767,\n \"acc_norm_stderr\": 0.03928297078179663\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n\ \ \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \ \ \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.5145631067961165,\n \"acc_stderr\": 0.049486373240266356,\n\ \ \"acc_norm\": 0.5145631067961165,\n \"acc_norm_stderr\": 0.049486373240266356\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6752136752136753,\n\ \ \"acc_stderr\": 
0.03067902276549883,\n \"acc_norm\": 0.6752136752136753,\n\ \ \"acc_norm_stderr\": 0.03067902276549883\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.52,\n \"acc_stderr\": 0.05021167315686779,\n \ \ \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05021167315686779\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6372924648786717,\n\ \ \"acc_stderr\": 0.017192708674602302,\n \"acc_norm\": 0.6372924648786717,\n\ \ \"acc_norm_stderr\": 0.017192708674602302\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.4797687861271676,\n \"acc_stderr\": 0.026897049996382868,\n\ \ \"acc_norm\": 0.4797687861271676,\n \"acc_norm_stderr\": 0.026897049996382868\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.30837988826815643,\n\ \ \"acc_stderr\": 0.015445716910998877,\n \"acc_norm\": 0.30837988826815643,\n\ \ \"acc_norm_stderr\": 0.015445716910998877\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5130718954248366,\n \"acc_stderr\": 0.028620130800700246,\n\ \ \"acc_norm\": 0.5130718954248366,\n \"acc_norm_stderr\": 0.028620130800700246\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5305466237942122,\n\ \ \"acc_stderr\": 0.028345045864840622,\n \"acc_norm\": 0.5305466237942122,\n\ \ \"acc_norm_stderr\": 0.028345045864840622\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5061728395061729,\n \"acc_stderr\": 0.027818623962583295,\n\ \ \"acc_norm\": 0.5061728395061729,\n \"acc_norm_stderr\": 0.027818623962583295\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.36879432624113473,\n \"acc_stderr\": 0.028782227561347247,\n \ \ \"acc_norm\": 0.36879432624113473,\n \"acc_norm_stderr\": 0.028782227561347247\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.35071707953063885,\n\ \ \"acc_stderr\": 0.01218777337074152,\n \"acc_norm\": 0.35071707953063885,\n\ \ \"acc_norm_stderr\": 0.01218777337074152\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5036764705882353,\n \"acc_stderr\": 0.030372015885428188,\n\ \ \"acc_norm\": 0.5036764705882353,\n \"acc_norm_stderr\": 0.030372015885428188\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.43300653594771243,\n \"acc_stderr\": 0.02004544247332423,\n \ \ \"acc_norm\": 0.43300653594771243,\n \"acc_norm_stderr\": 0.02004544247332423\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5363636363636364,\n\ \ \"acc_stderr\": 0.04776449162396197,\n \"acc_norm\": 0.5363636363636364,\n\ \ \"acc_norm_stderr\": 0.04776449162396197\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.49795918367346936,\n \"acc_stderr\": 0.0320089533497105,\n\ \ \"acc_norm\": 0.49795918367346936,\n \"acc_norm_stderr\": 0.0320089533497105\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5970149253731343,\n\ \ \"acc_stderr\": 0.034683432951111266,\n \"acc_norm\": 0.5970149253731343,\n\ \ \"acc_norm_stderr\": 0.034683432951111266\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \ \ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39759036144578314,\n\ \ \"acc_stderr\": 0.038099730845402184,\n \"acc_norm\": 0.39759036144578314,\n\ \ \"acc_norm_stderr\": 0.038099730845402184\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 
0.03565079670708312,\n\ \ \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.03565079670708312\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.27906976744186046,\n\ \ \"mc1_stderr\": 0.0157021070906279,\n \"mc2\": 0.44677492914800465,\n\ \ \"mc2_stderr\": 0.015984529713376692\n }\n}\n```" repo_url: https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|arc:challenge|25_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hellaswag|10_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_31T13_28_42.641649 path: - '**/details_harness|truthfulqa:mc|0_2023-08-31T13:28:42.641649.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-31T13:28:42.641649.parquet' - config_name: results data_files: - split: 2023_08_31T13_28_42.641649 path: - results_2023-08-31T13:28:42.641649.parquet - split: latest path: - results_2023-08-31T13:28:42.641649.parquet --- # Dataset Card for Evaluation run of xzuyn/LLaMa-2-PeanutButter_v14-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [xzuyn/LLaMa-2-PeanutButter_v14-7B](https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-31T13:28:42.641649](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B/blob/main/results_2023-08-31T13%3A28%3A42.641649.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.46314650559918413, "acc_stderr": 0.0353597619312551, "acc_norm": 0.4669718546477287, "acc_norm_stderr": 0.03534376319528717, "mc1": 0.27906976744186046, "mc1_stderr": 0.0157021070906279, "mc2": 0.44677492914800465, "mc2_stderr": 0.015984529713376692 }, "harness|arc:challenge|25": { "acc": 0.5051194539249146, "acc_stderr": 0.014610624890309157, "acc_norm": 0.5418088737201365, "acc_norm_stderr": 0.014560220308714697 }, "harness|hellaswag|10": { "acc": 0.6148177653853814, "acc_stderr": 0.004856437955719853, "acc_norm": 0.803823939454292, "acc_norm_stderr": 0.003962917115206181 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621502, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621502 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.04319223625811331, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4407894736842105, "acc_stderr": 0.040403110624904356, "acc_norm": 0.4407894736842105, "acc_norm_stderr": 0.040403110624904356 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4867924528301887, "acc_stderr": 0.030762134874500476, "acc_norm": 0.4867924528301887, "acc_norm_stderr": 0.030762134874500476 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4861111111111111, "acc_stderr": 0.04179596617581002, "acc_norm": 0.4861111111111111, "acc_norm_stderr": 0.04179596617581002 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895537, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895537 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179963, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179963 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224468, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.32456140350877194, 
"acc_stderr": 0.04404556157374767, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374767 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4413793103448276, "acc_stderr": 0.04137931034482758, "acc_norm": 0.4413793103448276, "acc_norm_stderr": 0.04137931034482758 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.02391998416404773, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02391998416404773 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523811, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523811 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4935483870967742, "acc_stderr": 0.02844163823354051, "acc_norm": 0.4935483870967742, "acc_norm_stderr": 0.02844163823354051 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3891625615763547, "acc_stderr": 0.03430462416103872, "acc_norm": 0.3891625615763547, "acc_norm_stderr": 0.03430462416103872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5393939393939394, "acc_stderr": 0.03892207016552013, "acc_norm": 0.5393939393939394, "acc_norm_stderr": 0.03892207016552013 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5303030303030303, "acc_stderr": 0.03555804051763929, "acc_norm": 0.5303030303030303, "acc_norm_stderr": 0.03555804051763929 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6528497409326425, "acc_stderr": 0.03435696168361355, "acc_norm": 0.6528497409326425, "acc_norm_stderr": 0.03435696168361355 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.43333333333333335, "acc_stderr": 0.025124653525885124, "acc_norm": 0.43333333333333335, "acc_norm_stderr": 0.025124653525885124 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.027738969632176088, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.027738969632176088 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4327731092436975, "acc_stderr": 0.03218358107742613, "acc_norm": 0.4327731092436975, "acc_norm_stderr": 0.03218358107742613 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943342, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943342 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6238532110091743, "acc_stderr": 0.020769231968205085, "acc_norm": 0.6238532110091743, "acc_norm_stderr": 0.020769231968205085 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.27314814814814814, "acc_stderr": 0.030388051301678116, "acc_norm": 0.27314814814814814, "acc_norm_stderr": 0.030388051301678116 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5686274509803921, "acc_stderr": 0.03476099060501636, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.03476099060501636 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5907172995780591, "acc_stderr": 0.032007041833595914, "acc_norm": 0.5907172995780591, "acc_norm_stderr": 0.032007041833595914 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.48878923766816146, "acc_stderr": 
0.033549366530984746, "acc_norm": 0.48878923766816146, "acc_norm_stderr": 0.033549366530984746 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.5114503816793893, "acc_norm_stderr": 0.043841400240780176 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.04826217294139894 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5030674846625767, "acc_stderr": 0.03928297078179663, "acc_norm": 0.5030674846625767, "acc_norm_stderr": 0.03928297078179663 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.5145631067961165, "acc_stderr": 0.049486373240266356, "acc_norm": 0.5145631067961165, "acc_norm_stderr": 0.049486373240266356 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6752136752136753, "acc_stderr": 0.03067902276549883, "acc_norm": 0.6752136752136753, "acc_norm_stderr": 0.03067902276549883 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6372924648786717, "acc_stderr": 0.017192708674602302, "acc_norm": 0.6372924648786717, "acc_norm_stderr": 0.017192708674602302 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.4797687861271676, "acc_stderr": 0.026897049996382868, "acc_norm": 0.4797687861271676, "acc_norm_stderr": 0.026897049996382868 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.30837988826815643, "acc_stderr": 0.015445716910998877, "acc_norm": 0.30837988826815643, "acc_norm_stderr": 0.015445716910998877 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5130718954248366, "acc_stderr": 0.028620130800700246, "acc_norm": 0.5130718954248366, "acc_norm_stderr": 0.028620130800700246 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5305466237942122, "acc_stderr": 0.028345045864840622, "acc_norm": 0.5305466237942122, "acc_norm_stderr": 0.028345045864840622 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5061728395061729, "acc_stderr": 0.027818623962583295, "acc_norm": 0.5061728395061729, "acc_norm_stderr": 0.027818623962583295 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.36879432624113473, "acc_stderr": 0.028782227561347247, "acc_norm": 0.36879432624113473, "acc_norm_stderr": 0.028782227561347247 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.35071707953063885, "acc_stderr": 0.01218777337074152, "acc_norm": 0.35071707953063885, "acc_norm_stderr": 0.01218777337074152 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5036764705882353, "acc_stderr": 0.030372015885428188, "acc_norm": 0.5036764705882353, "acc_norm_stderr": 0.030372015885428188 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.43300653594771243, "acc_stderr": 0.02004544247332423, "acc_norm": 0.43300653594771243, "acc_norm_stderr": 0.02004544247332423 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5363636363636364, "acc_stderr": 0.04776449162396197, "acc_norm": 0.5363636363636364, "acc_norm_stderr": 0.04776449162396197 }, "harness|hendrycksTest-security_studies|5": { 
"acc": 0.49795918367346936, "acc_stderr": 0.0320089533497105, "acc_norm": 0.49795918367346936, "acc_norm_stderr": 0.0320089533497105 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5970149253731343, "acc_stderr": 0.034683432951111266, "acc_norm": 0.5970149253731343, "acc_norm_stderr": 0.034683432951111266 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6842105263157895, "acc_stderr": 0.03565079670708312, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.03565079670708312 }, "harness|truthfulqa:mc|0": { "mc1": 0.27906976744186046, "mc1_stderr": 0.0157021070906279, "mc2": 0.44677492914800465, "mc2_stderr": 0.015984529713376692 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.6823200583457947, -0.8159134984016418, 0.28068649768829346, 0.2537234127521515, -0.16414928436279297, 0.01485159806907177, 0.013827945105731487, -0.25281545519828796, 0.5935536623001099, -0.05666986480355263, -0.49668824672698975, -0.6730837225914001, -0.45539629459381104, 0.24661113321781158, -0.08451030403375626, 0.8651089072227478, -0.19161708652973175, -0.16622349619865417, 0.09973743557929993, -0.0013834782876074314, -0.23743534088134766, -0.316477507352829, -0.45647725462913513, -0.3583725392818451, 0.17902907729148865, 0.4611780643463135, 0.45538249611854553, 0.834351122379303, 0.6712322235107422, 0.3018971383571625, -0.33080318570137024, -0.05470108240842819, -0.18951363861560822, -0.2856503129005432, 0.39091670513153076, -0.38353458046913147, -0.8079226613044739, 0.2986723780632019, 0.747107982635498, 0.6322818398475647, -0.09072098135948181, 0.3069833219051361, 0.019955897703766823, 0.5490816235542297, -0.3228936493396759, 0.02547547034919262, -0.30400142073631287, 0.2237970530986786, -0.20282751321792603, -0.2729046940803528, -0.2782714068889618, -0.25202134251594543, -0.13835211098194122, -0.8948546051979065, 0.2786215543746948, 0.3446432054042816, 1.6185612678527832, -0.11755139380693436, -0.25059249997138977, 0.09321661293506622, -0.07663943618535995, 1.027017593383789, -0.8508504629135132, 0.33166205883026123, 0.7697848677635193, 0.11420660465955734, -0.2242356389760971, -0.5983026027679443, -0.6518852710723877, 0.03336743637919426, -0.3825233280658722, 0.3262207508087158, -0.09881207346916199, -0.19184045493602753, 0.3725131154060364, 0.6986272931098938, -0.682385265827179, 0.16824454069137573, -0.6238728761672974, -0.1350759118795395, 1.0659860372543335, 0.3315848112106323, 0.05291936174035072, -0.41376814246177673, -0.7312579154968262, -0.6303945183753967, -0.41397830843925476, 0.2649216055870056, 0.40836942195892334, 0.3013570010662079, -0.37719467282295227, 0.702192485332489, -0.45880675315856934, 0.5196100473403931, 0.4293501079082489, -0.023501168936491013, 0.9012846350669861, -0.6896687746047974, -0.5171574354171753, -0.03691092133522034, 1.1080670356750488, 0.5649843215942383, 0.07298588752746582, 0.22688508033752441, 0.029123209416866302, -0.0716700628399849, 0.015283910557627678, -0.8701783418655396, -0.309369832277298, 0.18599455058574677, -0.3992990553379059, -0.567470133304596, 0.3073480427265167, -0.8668305277824402, 0.10884758830070496, 0.01307174377143383, 0.4011170566082001, -0.5017234683036804, -0.12203192710876465, 0.24107015132904053, -0.4426442086696625, 0.7952826023101807, -0.17119501531124115, -0.797740638256073, 0.42661911249160767, 0.5217800736427307, 0.7604439854621887, -0.06914012879133224, -0.4183735251426697, -0.08434643596410751, -0.11524417996406555, -0.2884027361869812, 0.5425348281860352, -0.2834334075450897, -0.4129546284675598, -0.3427022695541382, 0.30641141533851624, -0.2206629365682602, -0.351060152053833, 0.721584141254425, -0.21476984024047852, 0.17337514460086823, -0.4536399245262146, -0.647730827331543, 0.08912581205368042, 0.38522109389305115, -0.4145136773586273, 1.324323058128357, 0.28397947549819946, -0.8215113282203674, 0.4441322982311249, -0.5373121500015259, -0.17634381353855133, -0.05267908796668053, -0.0826677680015564, -0.8227361440658569, -0.2544611990451813, 0.1690445840358734, 0.41261306405067444, -0.17645002901554108, -0.13145671784877777, -0.3831171691417694, -0.3749040961265564, 0.3820352554321289, -0.16424675285816193, 1.2240194082260132, -0.07353327423334122, -0.8020737171173096, -0.09938042610883713, 
-1.2845838069915771, 0.32201144099235535, 0.2775842547416687, -0.39074286818504333, -0.1715235710144043, -0.45240500569343567, -0.03033350594341755, 0.19796521961688995, 0.2746453583240509, -0.8122875690460205, 0.29254698753356934, -0.3270123600959778, 0.18608428537845612, 1.2941653728485107, -0.0034872107207775116, 0.12464894354343414, -0.567667543888092, 0.5478813648223877, 0.19468986988067627, 0.140102818608284, 0.3596813678741455, -0.6327289342880249, -0.8156803250312805, -0.4964723587036133, -0.022166725248098373, 0.6215707659721375, -0.16689501702785492, 1.1395692825317383, 0.03610154986381531, -0.9009163975715637, -0.4551542103290558, -0.1344502866268158, 0.4994491934776306, 0.8027068376541138, 0.5847676396369934, -0.03875063359737396, -0.6585860252380371, -1.0974432229995728, -0.230193629860878, -0.17622852325439453, 0.1570301651954651, 0.20642276108264923, 1.0426870584487915, -0.22524963319301605, 0.6140692830085754, -1.0438542366027832, -0.16860611736774445, 0.13892875611782074, -0.10539781302213669, 0.7916755676269531, 0.7214421033859253, 0.60275799036026, -0.6345086693763733, -0.5544112324714661, 0.2240513116121292, -0.8527891635894775, -0.11255412548780441, 0.10686202347278595, -0.3203204572200775, 0.11810538917779922, 0.0873587504029274, -0.7181362509727478, 0.5369594693183899, 0.25361859798431396, -1.0578980445861816, 1.073530673980713, -0.2783409059047699, 0.5607478618621826, -0.9997277855873108, 0.1871657818555832, -0.05825147032737732, -0.009146868251264095, -0.49508747458457947, 0.012923787347972393, 0.1114407405257225, 0.484240859746933, -0.4554694890975952, 0.7736107707023621, -0.6951706409454346, -0.06758240610361099, 0.43219083547592163, 0.12769103050231934, -0.0935029685497284, 0.36397916078567505, -0.20503078401088715, 0.8148797750473022, 0.7362213730812073, -0.4245139956474304, 0.5216724872589111, 0.42897674441337585, -0.26993152499198914, 0.7020837664604187, -0.5007089972496033, -0.33510735630989075, 0.31217554211616516, -0.012224255129694939, -0.8859243988990784, -0.47375553846359253, 0.08384054154157639, -0.6144505739212036, -0.1168169379234314, 0.3854951560497284, -0.27324378490448, -0.8041716814041138, -0.9853808879852295, 0.2975085973739624, 0.728942334651947, -0.45089343190193176, -0.23205295205116272, 0.0450872927904129, 0.0839318260550499, -0.8393827676773071, -0.8496266603469849, -0.49742504954338074, -0.2220604121685028, -0.7216418981552124, 0.3672647476196289, -0.24432161450386047, -0.2554764449596405, -0.06587382405996323, -0.2721683084964752, -0.3169137239456177, 0.013183439150452614, 0.14789438247680664, 0.6550588011741638, -0.44071462750434875, -0.3120740056037903, -0.18767046928405762, -0.19762389361858368, 0.2378602921962738, -0.0822618156671524, 0.3907456696033478, -0.43348121643066406, -0.36043649911880493, -0.4075044095516205, 0.004488843958824873, 0.6584967374801636, -0.06452314555644989, 0.7079024314880371, 0.4459192752838135, -0.2954864501953125, 0.00969005562365055, -0.2958584427833557, -0.2538246214389801, -0.5843009352684021, 0.29681268334388733, -0.515369713306427, -1.0313650369644165, 0.7612243890762329, 0.525475025177002, 0.04301964491605759, 1.0737532377243042, 0.6121140718460083, -0.299623966217041, 1.0189567804336548, 0.06496809422969818, 0.3124896287918091, 0.3964018225669861, -0.6809969544410706, 0.10340522229671478, -0.9520872831344604, -0.31680816411972046, -0.5675250887870789, -0.5020922422409058, -0.7232848405838013, -0.09256026148796082, 0.26129674911499023, 0.1309250295162201, -0.6414631009101868, 0.6169498562812805, 
-0.8273605704307556, 0.5935265421867371, 0.5804925560951233, 0.2736073136329651, 0.19580167531967163, -0.1381567120552063, -0.37353959679603577, -0.10975810885429382, -0.44829338788986206, -0.2510545253753662, 1.224465250968933, 0.25367075204849243, 0.7210406064987183, 0.07321259379386902, 0.8887079954147339, 0.0714293047785759, -0.09455190598964691, -0.5765174627304077, 0.656525194644928, 0.1425759494304657, -0.8119814395904541, -0.3948505222797394, -0.5065588355064392, -1.1217782497406006, 0.45004600286483765, -0.12023860961198807, -0.8940587639808655, 0.11591930687427521, 0.03283156082034111, -0.22697670757770538, 0.4784908890724182, -0.539873480796814, 0.8363614678382874, -0.16872283816337585, -0.4690798819065094, 0.058406274765729904, -0.8627554774284363, 0.48883306980133057, 0.20879557728767395, 0.20929238200187683, -0.02862941473722458, 0.21067750453948975, 1.155821442604065, -0.8348950147628784, 0.433708131313324, 0.1319722831249237, 0.03246108815073967, 0.3100951611995697, -0.19127286970615387, 0.5362579226493835, 0.1016283854842186, -0.02116430178284645, -0.1056281328201294, 0.25814956426620483, -0.8542651534080505, -0.04533477872610092, 0.9280364513397217, -0.9680454730987549, -0.6384167671203613, -0.9108133316040039, -0.5155718326568604, 0.10767331719398499, 0.5733827948570251, 0.38038334250450134, 0.4523738622665405, 0.015359312295913696, 0.44123613834381104, 0.8465476632118225, -0.11060434579849243, 0.5860190391540527, 0.2424936443567276, 0.06864719092845917, -0.6588775515556335, 0.8639603853225708, 0.0973433330655098, 0.362226277589798, 0.3082635998725891, 0.3933488726615906, -0.5031777620315552, -0.17577563226222992, -0.24557283520698547, 0.5108045339584351, -0.617759644985199, -0.29492923617362976, -0.38016730546951294, -0.36786991357803345, -0.734480082988739, -0.6317591071128845, -0.29168546199798584, -0.5738329291343689, -0.5051501393318176, -0.48938971757888794, 0.6075354814529419, 0.47954216599464417, -0.3685823678970337, 0.07463216781616211, -0.4657655954360962, 0.26701584458351135, 0.3031754791736603, 0.5146694779396057, -0.3768846392631531, -0.5610955953598022, 0.052357859909534454, -0.1660441756248474, -0.5929471850395203, -1.0302660465240479, 0.34957149624824524, -0.051779113709926605, 0.5128958821296692, 0.5834395885467529, 0.04859527200460434, 0.8725870251655579, -0.21110853552818298, 1.015081524848938, 0.3253878951072693, -0.8295163512229919, 0.744328498840332, -0.296983927488327, 0.13138970732688904, 0.6011074185371399, 0.20978210866451263, -0.22263148427009583, -0.660007894039154, -1.303864598274231, -0.8184801340103149, 0.6814318895339966, 0.4125750958919525, -0.2781409025192261, 0.047509096562862396, 0.11894151568412781, -0.2579565644264221, -0.13984909653663635, -0.6922528743743896, -0.8804577589035034, -0.15694621205329895, -0.4694804847240448, 0.13994993269443512, 0.023010671138763428, -0.34369781613349915, -0.8276103734970093, 0.9289985299110413, 0.017745796591043472, 0.5765146613121033, 0.43607115745544434, 0.051529884338378906, 0.05525147169828415, 0.47205352783203125, 0.9109665155410767, 0.7043591141700745, -0.44918662309646606, 0.4048817753791809, 0.41410768032073975, -1.0239113569259644, 0.5235239863395691, 0.32423135638237, -0.06639177352190018, -0.023572521284222603, 0.4447771906852722, 0.4372296631336212, 0.07024264335632324, -0.23779530823230743, 0.6280248761177063, 0.004666696302592754, -0.5204343199729919, -0.3884453773498535, 0.11250770092010498, -0.11830230057239532, 0.003849769476801157, 0.36016348004341125, -0.16611722111701965, 
-0.028948534280061722, -0.47472360730171204, 0.4663042724132538, 0.36318445205688477, -0.4187815487384796, -0.188907191157341, 0.7218238115310669, -0.1850832849740982, -0.08572982996702194, 0.36252957582473755, -0.20662637054920197, -0.6380606293678284, 1.1362879276275635, 0.6354202032089233, 0.6567515134811401, -0.28177884221076965, -0.06258658319711685, 0.8755616545677185, 0.40369951725006104, -0.01451736968010664, 0.5429238677024841, 0.2712942063808441, -0.2479708343744278, 0.1876918524503708, -0.8254689574241638, -0.0372641384601593, 0.16097372770309448, -0.8201215863227844, 0.34589651226997375, -0.5199880599975586, -0.19774793088436127, 0.03503534942865372, 0.44407352805137634, -0.4162464141845703, 0.5373780131340027, -0.40843141078948975, 1.2098063230514526, -0.9976802468299866, 0.7293121814727783, 0.7432038187980652, -0.5533774495124817, -1.0103623867034912, -0.5411078929901123, 0.029829692095518112, -0.8480777740478516, 0.5380979776382446, -0.02575008198618889, 0.2072097212076187, -0.09215840697288513, -0.6831039786338806, -0.9997216463088989, 1.429621696472168, -0.047208819538354874, -0.47022080421447754, 0.2206299901008606, -0.04992692172527313, 0.4181579649448395, 0.1743832528591156, 0.605387270450592, 0.8009273409843445, 0.8133040070533752, -0.10058604925870895, -0.7549663782119751, 0.3843313753604889, -0.521938145160675, -0.2916550934314728, 0.4802044630050659, -0.9580079317092896, 1.2049024105072021, -0.03844621777534485, 0.2002618908882141, -0.11932564526796341, 0.6866899728775024, 0.8252552151679993, 0.24242062866687775, 0.34595251083374023, 0.8976337909698486, 0.8825646638870239, -0.4972197711467743, 1.0264276266098022, -0.21344392001628876, 0.8670581579208374, 0.6959160566329956, 0.2074345052242279, 0.7649611830711365, 0.6844425797462463, -0.5803635120391846, 0.5935896039009094, 0.834712564945221, -0.3353545665740967, 0.3944506049156189, 0.31501126289367676, -0.12295345962047577, -0.13630540668964386, 0.4217751622200012, -0.8841115236282349, 0.10796776413917542, 0.11669314652681351, -0.36837583780288696, 0.079890675842762, -0.4709966778755188, 0.37759605050086975, -0.13260866701602936, -0.001339647569693625, 0.3324052691459656, 0.05070873722434044, -0.4127972722053528, 0.9393187761306763, -0.13883525133132935, 0.7688930630683899, -0.5218344926834106, -0.09075756371021271, -0.3624180555343628, 0.5978259444236755, -0.4733099639415741, -1.0969069004058838, 0.15585118532180786, 0.08902411162853241, -0.15279512107372284, -0.14570769667625427, 0.7037319540977478, -0.17483648657798767, -0.7814313173294067, 0.12656570971012115, 0.02946379780769348, 0.10156998783349991, 0.49736088514328003, -0.6838341951370239, -0.28800585865974426, -0.03963501751422882, -0.6045693159103394, 0.1157597228884697, 0.2942006289958954, 0.24007892608642578, 0.5646727681159973, 0.6283113956451416, 0.13591420650482178, 0.4451790452003479, -0.5846525430679321, 0.8310216665267944, -1.0609285831451416, -0.7206985950469971, -0.9155106544494629, 0.47206881642341614, -0.3438588082790375, -0.8929622173309326, 1.006668210029602, 1.0314621925354004, 0.9167291522026062, 0.013914682902395725, 0.6686383485794067, -0.410372257232666, 0.2619069218635559, -0.3675386309623718, 0.9069525003433228, -0.8523797988891602, -0.2192373126745224, -0.27381831407546997, -0.6699655055999756, -0.3573687672615051, 0.8885443210601807, -0.16185086965560913, 0.045610010623931885, 1.0642485618591309, 0.6943678855895996, -0.11805907636880875, 0.0400138683617115, -0.059743937104940414, 0.5932513475418091, 0.3812852203845978, 
1.0017645359039307, 0.6764018535614014, -0.7909632325172424, 0.3766583204269409, -0.5609110593795776, -0.4321970045566559, -0.4325326085090637, -0.45841914415359497, -0.8463783860206604, -0.47266507148742676, -0.22974984347820282, -0.613817036151886, -0.11671841144561768, 1.0357532501220703, 0.4790422022342682, -0.9064404964447021, -0.4363853633403778, -0.10983385145664215, 0.1899736374616623, -0.5943208336830139, -0.4098276197910309, 0.6872463226318359, -0.11026828736066818, -0.5364329814910889, 0.20578864216804504, -0.1445656418800354, 0.2189161330461502, 0.1132783368229866, -0.38376525044441223, -0.6896613836288452, 0.026906244456768036, 0.4529610872268677, 0.32569044828414917, -0.7228267192840576, -0.6862441897392273, 0.3220590054988861, -0.520453155040741, 0.48796603083610535, -0.025499651208519936, -0.558525800704956, -0.002665809355676174, 0.7190327048301697, 0.45469820499420166, 0.6856232285499573, -0.0431554839015007, 0.05466802045702934, -0.6946991682052612, 0.1485435515642166, -0.034670233726501465, 0.2957768738269806, -0.02852110005915165, -0.3164832890033722, 0.7889590263366699, 0.661566972732544, -0.5253544449806213, -1.0802093744277954, -0.4160795509815216, -1.452910304069519, -0.025206349790096283, 1.1101865768432617, 0.0014863177202641964, -0.5329403281211853, 0.26258909702301025, -0.15608978271484375, 0.17763011157512665, -0.3553292751312256, 0.7813282012939453, 0.7617712020874023, -0.377697616815567, 0.10688767582178116, -0.6205387115478516, 0.3446652293205261, 0.5488744974136353, -1.2165148258209229, -0.10633423179388046, 0.2124270796775818, 0.36638280749320984, 0.36827224493026733, 0.6420847773551941, -0.07606333494186401, 0.29934775829315186, 0.2647814452648163, 0.024601386860013008, 0.04373389109969139, 0.059870582073926926, -0.21935629844665527, 0.042813923209905624, -0.2502010762691498, -0.4780983626842499 ]
open-llm-leaderboard/details_NewstaR__Morningstar-13b-hf
open-llm-leaderboard
2023-10-24T11:48:03Z
201
0
[ "region:us" ]
null
2023-09-15T10:46:47Z
--- pretty_name: Evaluation run of NewstaR/Morningstar-13b-hf dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [NewstaR/Morningstar-13b-hf](https://huggingface.co/NewstaR/Morningstar-13b-hf)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configurations, each one corresponding to one of the\ \ evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run. The \"train\" split always points to the latest results.\n\ \nAn additional configuration \"results\" stores all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NewstaR__Morningstar-13b-hf\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-24T11:47:49.935503](https://huggingface.co/datasets/open-llm-leaderboard/details_NewstaR__Morningstar-13b-hf/blob/main/results_2023-10-24T11-47-49.935503.json) (note\ \ that there might be results for other tasks in the repo if successive evals didn't\ \ cover the same tasks. You can find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n    \"all\": {\n        \"em\": 0.1782718120805369,\n\ \        \"em_stderr\": 0.003919630092588375,\n        \"f1\": 0.2387195889261742,\n\ \        \"f1_stderr\": 0.003944947017182046,\n        \"acc\": 0.448727630233375,\n\ \        \"acc_stderr\": 0.011074189612085313\n    },\n    \"harness|drop|3\": {\n\ \        \"em\": 0.1782718120805369,\n        \"em_stderr\": 0.003919630092588375,\n\ \        \"f1\": 0.2387195889261742,\n        \"f1_stderr\": 0.003944947017182046\n\ \    },\n    \"harness|gsm8k|5\": {\n        \"acc\": 0.15238817285822592,\n    \ \    \"acc_stderr\": 0.009899572254794204\n    },\n    \"harness|winogrande|5\"\ : {\n        \"acc\": 0.745067087608524,\n        \"acc_stderr\": 0.012248806969376422\n\ \    }\n}\n```" repo_url: https://huggingface.co/NewstaR/Morningstar-13b-hf leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|arc:challenge|25_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-15T10-46-30.957408.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_24T11_47_49.935503 path: - '**/details_harness|drop|3_2023-10-24T11-47-49.935503.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-24T11-47-49.935503.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_24T11_47_49.935503 path: - '**/details_harness|gsm8k|5_2023-10-24T11-47-49.935503.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-24T11-47-49.935503.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hellaswag|10_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T10-46-30.957408.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T10-46-30.957408.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-15T10-46-30.957408.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T10-46-30.957408.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_15T10_46_30.957408 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-management|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-15T10-46-30.957408.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T10-46-30.957408.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_15T10_46_30.957408 path: - '**/details_harness|truthfulqa:mc|0_2023-09-15T10-46-30.957408.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-15T10-46-30.957408.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_24T11_47_49.935503 path: - '**/details_harness|winogrande|5_2023-10-24T11-47-49.935503.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-24T11-47-49.935503.parquet' - config_name: results data_files: - split: 2023_09_15T10_46_30.957408 path: - results_2023-09-15T10-46-30.957408.parquet - split: 2023_10_24T11_47_49.935503 path: - results_2023-10-24T11-47-49.935503.parquet - split: latest path: - results_2023-10-24T11-47-49.935503.parquet --- # Dataset Card for Evaluation run of NewstaR/Morningstar-13b-hf ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/NewstaR/Morningstar-13b-hf - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [NewstaR/Morningstar-13b-hf](https://huggingface.co/NewstaR/Morningstar-13b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NewstaR__Morningstar-13b-hf", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T11:47:49.935503](https://huggingface.co/datasets/open-llm-leaderboard/details_NewstaR__Morningstar-13b-hf/blob/main/results_2023-10-24T11-47-49.935503.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. 
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.1782718120805369, "em_stderr": 0.003919630092588375, "f1": 0.2387195889261742, "f1_stderr": 0.003944947017182046, "acc": 0.448727630233375, "acc_stderr": 0.011074189612085313 }, "harness|drop|3": { "em": 0.1782718120805369, "em_stderr": 0.003919630092588375, "f1": 0.2387195889261742, "f1_stderr": 0.003944947017182046 }, "harness|gsm8k|5": { "acc": 0.15238817285822592, "acc_stderr": 0.009899572254794204 }, "harness|winogrande|5": { "acc": 0.745067087608524, "acc_stderr": 0.012248806969376422 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
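The card above describes one configuration per evaluated task plus an aggregated "results" configuration, with splits named after run timestamps and a "latest" split pointing at the most recent run. The snippet below is a minimal sketch of how those pieces could be combined for this repository: it lists the available configurations and then loads the "latest" split of the "results" configuration. The configuration and split names are taken from the card itself; the column layout of the aggregated-results rows is not documented here, so the final prints are only illustrative, and running it requires the `datasets` library and access to the Hugging Face Hub.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_NewstaR__Morningstar-13b-hf"

# One configuration per evaluated task (e.g. "harness_winogrande_5"),
# plus the aggregated "results" configuration described in the card.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations, e.g. {configs[:5]}")

# The "latest" split of the "results" configuration points at the most
# recent evaluation run (2023-10-24 for this repository, per the card).
results = load_dataset(repo, "results", split="latest")
print(results)     # schema and row count
print(results[0])  # first aggregated-results row (field names not documented above)
```

The same pattern applies to any of the per-task configurations; only the configuration name passed to `load_dataset` changes.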
[ -0.44217178225517273, -0.7257723808288574, 0.22016552090644836, 0.3153785765171051, -0.1461331993341446, 0.20502662658691406, -0.4015941619873047, -0.19492049515247345, 0.4152980148792267, 0.5062764883041382, -0.7843478322029114, -1.0255333185195923, -0.6701681017875671, 0.25871536135673523, -0.20284613966941833, 1.168455958366394, -0.19460885226726532, -0.2878243029117584, 0.0667814314365387, -0.2636614739894867, -0.2447388619184494, -0.4107109010219574, -0.5222264528274536, -0.3816904127597809, 0.40588799118995667, 0.6009533405303955, 0.45879724621772766, 0.692586362361908, 0.7655518054962158, 0.35688287019729614, -0.1392858624458313, 0.20136462152004242, -0.4347478151321411, -0.13737262785434723, 0.2683489918708801, -0.49659082293510437, -0.7117841839790344, 0.12772130966186523, 0.7319493293762207, 0.4935038685798645, -0.23121556639671326, 0.6733824014663696, 0.08199743926525116, 0.7196899056434631, -0.4684869647026062, 0.4294683337211609, -0.29740142822265625, -0.029879095032811165, -0.3713306486606598, -0.1714838594198227, 0.004163194447755814, -0.3734443783760071, -0.21633164584636688, -0.5445495843887329, 0.16089046001434326, 0.19054120779037476, 1.1591476202011108, 0.15753202140331268, -0.13104787468910217, -0.16985926032066345, -0.33415088057518005, 0.8295415639877319, -0.9226729273796082, 0.04488479346036911, 0.6155278086662292, 0.2308386266231537, -0.23560993373394012, -0.5390733480453491, -0.32650741934776306, -0.07231384515762329, -0.2798668146133423, 0.15311752259731293, -0.08143342286348343, -0.16364876925945282, 0.36109116673469543, 0.6850710511207581, -0.6581247448921204, -0.022259315475821495, -0.5778633952140808, -0.14585040509700775, 0.950594961643219, 0.4085652530193329, -0.008212207816541195, -0.5987315773963928, -0.3435176908969879, -0.3222808837890625, -0.46603360772132874, 0.2263316661119461, 0.5149285793304443, 0.5823901295661926, -0.6497414112091064, 0.7696026563644409, -0.47617772221565247, 0.5503032803535461, -0.012914866209030151, -0.26039114594459534, 0.8829250931739807, -0.6670490503311157, -0.24430808424949646, -0.0023706175852566957, 1.064939260482788, 0.47184616327285767, -0.01363711804151535, 0.21002580225467682, -0.3240662217140198, -0.09048096835613251, 0.10788793116807938, -0.819439172744751, -0.09968598186969757, 0.3985196053981781, -0.6193230748176575, -0.5272388458251953, 0.28066447377204895, -0.9786350727081299, -0.17443370819091797, -0.3108137547969818, 0.18545521795749664, -0.12259877473115921, -0.41800957918167114, -0.020024770870804787, -0.11357273161411285, 0.2352510243654251, 0.0683218389749527, -0.5771003365516663, 0.3615431487560272, 0.6554619669914246, 0.9995501637458801, -0.060533083975315094, -0.33594971895217896, -0.37106990814208984, -0.25749287009239197, -0.15667350590229034, 0.46625906229019165, -0.27094194293022156, -0.4016185700893402, -0.11834029853343964, 0.35528743267059326, -0.32695555686950684, -0.6201108694076538, 0.7358414530754089, -0.24517783522605896, 0.1966870129108429, -0.3902595043182373, -0.3903281092643738, -0.11824671179056168, 0.3879101574420929, -0.6219785213470459, 1.4377977848052979, 0.4124350845813751, -0.9229865670204163, 0.07851476222276688, -0.8604301810264587, -0.23944450914859772, -0.007829559035599232, -0.037777889519929886, -0.6304256916046143, -0.16736114025115967, 0.18963554501533508, 0.6190102100372314, -0.326826810836792, 0.106263168156147, -0.24185283482074738, -0.4055211544036865, 0.07314074784517288, -0.06017865985631943, 0.9977547526359558, 0.25942811369895935, -0.43966230750083923, 
0.16705211997032166, -1.046452522277832, 0.03684317320585251, 0.3050094246864319, -0.6020169258117676, -0.20243670046329498, -0.29303374886512756, 0.22683651745319366, 0.15451128780841827, 0.5205453634262085, -0.6225911378860474, 0.2837861180305481, -0.1412520557641983, 0.33249160647392273, 0.9342343211174011, 0.040798369795084, 0.22771325707435608, -0.47343575954437256, 0.5858063101768494, 0.07728374749422073, 0.31614115834236145, 0.10881705582141876, -0.47352495789527893, -0.7319048047065735, -0.17408260703086853, 0.136087566614151, 0.6572707891464233, -0.40656358003616333, 0.7653478384017944, -0.4121134877204895, -0.7078135013580322, -0.659716784954071, 0.12640611827373505, 0.4619884490966797, 0.5957402586936951, 0.3720376491546631, -0.18184946477413177, -0.7590957880020142, -1.0583387613296509, 0.0646420568227768, -0.21457447111606598, 0.10400490462779999, 0.5386105179786682, 0.9220296740531921, -0.366160124540329, 0.6218687295913696, -0.7335953116416931, -0.346223920583725, -0.28417250514030457, 0.0924619510769844, 0.8182085156440735, 0.5022832155227661, 0.4916704297065735, -0.6005333662033081, -0.3131179213523865, -0.01939387060701847, -0.812714695930481, -0.26885783672332764, -0.20951268076896667, -0.30144330859184265, 0.28174886107444763, 0.05472452566027641, -0.4926549196243286, 0.505346417427063, 0.5593084096908569, -0.6555770039558411, 0.7939038276672363, 0.015600672923028469, 0.42894718050956726, -1.1895239353179932, 0.19631820917129517, 0.15234291553497314, -0.014748377725481987, -0.3231470584869385, -0.1409011334180832, 0.038523703813552856, 0.2724139094352722, -0.40918371081352234, 0.5973336100578308, -0.3823373317718506, -0.23075063526630402, 0.002399018732830882, 0.008228281512856483, -0.09619426727294922, 0.5754772424697876, -0.30867403745651245, 0.9070584177970886, 0.4751528799533844, -0.4071503281593323, 0.3047129511833191, 0.5080812573432922, -0.5585711598396301, 0.24048326909542084, -0.5125683546066284, -0.10910916328430176, 0.12910416722297668, 0.11498848348855972, -0.8815857768058777, -0.3537348806858063, 0.44859081506729126, -0.49737608432769775, 0.14713551104068756, -0.23298823833465576, -0.4979346990585327, -0.5174440145492554, -0.48217904567718506, 0.10176043212413788, 0.45256760716438293, -0.4559005796909332, 0.28014227747917175, 0.34180930256843567, -0.0018163681961596012, -0.7349123954772949, -0.7019692659378052, -0.08613383769989014, -0.31236594915390015, -0.7126602530479431, 0.44472840428352356, -0.10739826411008835, -0.3335340619087219, 0.06698718667030334, -0.16521236300468445, -0.03384486213326454, 0.1534382551908493, 0.4139101803302765, 0.505586564540863, -0.20312601327896118, -0.41183021664619446, -0.1598115712404251, -0.1652410328388214, 0.1941092610359192, 0.21443666517734528, 0.5784603953361511, -0.39861762523651123, -0.2026102989912033, -0.2796964645385742, 0.16013303399085999, 0.5365058183670044, -0.16304078698158264, 0.8775448799133301, 0.7209281921386719, -0.24217310547828674, 0.06237266957759857, -0.434165894985199, -0.03927590698003769, -0.4926466643810272, 0.3632490634918213, -0.28915029764175415, -0.8170602321624756, 0.8528187274932861, 0.21793513000011444, 0.21913516521453857, 0.6954720616340637, 0.5823191404342651, 0.06850162893533707, 0.703562319278717, 0.3202289044857025, -0.11713368445634842, 0.5443976521492004, -0.7842055559158325, -0.06682322174310684, -1.0964611768722534, -0.41374075412750244, -0.5006411671638489, -0.44813525676727295, -0.7798625826835632, -0.27526530623435974, 0.23439465463161469, 0.11472946405410767, 
-0.46717026829719543, 0.5727649927139282, -0.6076401472091675, 0.2751428484916687, 0.6832102537155151, 0.23037376999855042, 0.1313478797674179, -0.09857194125652313, -0.14346979558467865, 0.2613389492034912, -0.4339393675327301, -0.4510975778102875, 1.388240933418274, 0.2338586449623108, 0.6542019844055176, -0.054416459053754807, 1.0025367736816406, 0.31875842809677124, 0.2592090666294098, -0.4971664249897003, 0.628815770149231, -0.06990375369787216, -0.5691249370574951, -0.16043305397033691, -0.5846433639526367, -0.8566829562187195, 0.17208798229694366, -0.09549851715564728, -0.8035678863525391, 0.15644747018814087, -0.052473489195108414, -0.19712279736995697, 0.31829044222831726, -0.5795822739601135, 0.8635623455047607, -0.17331892251968384, -0.4043911397457123, 0.022251447662711143, -0.7990779280662537, 0.48043909668922424, 0.04400406405329704, 0.38946858048439026, -0.3490805923938751, 0.0738496333360672, 1.178833246231079, -0.6129955053329468, 0.7327153086662292, -0.2242608517408371, 0.1253783255815506, 0.305511474609375, -0.3820907175540924, 0.5932551026344299, 0.010835492983460426, -0.26430070400238037, 0.46017104387283325, -0.11881003528833389, -0.3008386492729187, -0.26611632108688354, 0.8487378358840942, -0.9205625057220459, -0.3980652689933777, -0.5058653354644775, -0.5496519207954407, 0.21042487025260925, 0.24995237588882446, 0.28944480419158936, 0.32065555453300476, 0.043334681540727615, 0.3162580132484436, 0.1720828115940094, -0.12884867191314697, 0.5806947946548462, 0.46086472272872925, -0.23627281188964844, -0.706234872341156, 0.6063624620437622, 0.1991419494152069, 0.029767515137791634, 0.22104062139987946, 0.06073160842061043, -0.529863715171814, -0.5089325308799744, -0.3416106402873993, 0.3064580261707306, -0.5414063930511475, -0.35394486784935, -0.49191924929618835, -0.26843178272247314, -0.32888245582580566, -0.01800249144434929, -0.48209095001220703, -0.4808007478713989, -0.4047943353652954, -0.3121313452720642, 0.6157910227775574, 0.5512198209762573, -0.4266600012779236, 0.36193904280662537, -0.7926958203315735, 0.18740151822566986, -0.1925273835659027, 0.3594508469104767, -0.14189083874225616, -0.4869306981563568, -0.4814835488796234, 0.13397447764873505, -0.439981609582901, -0.8388192057609558, 0.5925362706184387, -0.027506615966558456, 0.6130359172821045, 0.18056374788284302, 0.18733981251716614, 0.7886047959327698, -0.2546563148498535, 0.933444082736969, 0.021430356428027153, -0.6553069353103638, 0.7301578521728516, -0.352583646774292, 0.13493673503398895, 0.6068602800369263, 0.19309626519680023, -0.5357574224472046, -0.3387715220451355, -0.8986654877662659, -1.0965393781661987, 0.976107656955719, 0.5964345932006836, -0.3314439654350281, 0.07793091982603073, 0.3529869318008423, -0.07601737231016159, 0.1973542720079422, -0.6017017364501953, -0.7880934476852417, -0.14910119771957397, -0.30063503980636597, -0.03987766429781914, -0.06392277032136917, -0.4215226471424103, -0.4188269078731537, 0.9003443121910095, 0.02441059611737728, 0.5245018005371094, 0.16645286977291107, 0.013456964865326881, -0.16003943979740143, 0.29147201776504517, 0.47506794333457947, 0.8123700022697449, -0.5420743823051453, -0.03954610601067543, 0.12826716899871826, -0.6658602952957153, 0.09910915791988373, 0.41616523265838623, -0.046814922243356705, -0.09225834906101227, 0.5571912527084351, 0.9201089143753052, 0.15787100791931152, -0.39945635199546814, 0.5012971758842468, 0.012200151570141315, -0.32718124985694885, -0.512357771396637, 0.14452503621578217, -0.011078531853854656, 
0.44795048236846924, 0.5478939414024353, 0.051181610673666, -0.01806718297302723, -0.21896786987781525, 0.3292087912559509, 0.2324361503124237, -0.093692347407341, -0.3753080368041992, 0.6022223830223083, -0.0009446568437851965, -0.3263702392578125, 0.7173041701316833, -0.1439022272825241, -0.5429641008377075, 1.0941205024719238, 0.321029931306839, 0.8643348813056946, -0.04164405167102814, 0.18543748557567596, 0.6618730425834656, 0.37852248549461365, -0.11261306703090668, 0.6044094562530518, 0.14227698743343353, -0.6424087285995483, -0.3556678891181946, -0.8472611308097839, -0.23874185979366302, 0.43202710151672363, -1.081437587738037, 0.3746388256549835, -0.23184435069561005, -0.3013300597667694, -0.04878484830260277, 0.48921144008636475, -0.8759315013885498, 0.1267957240343094, 0.021400965750217438, 0.9582449793815613, -1.1147491931915283, 0.586246907711029, 0.8532550930976868, -0.5130718350410461, -0.8809205293655396, -0.2965478301048279, 0.1023838073015213, -0.8577924370765686, 0.4813491404056549, 0.331223726272583, 0.4343447983264923, -0.15744221210479736, -0.568717360496521, -1.049506425857544, 1.476918339729309, 0.12922874093055725, -0.6368709802627563, 0.12695015966892242, 0.15568961203098297, 0.29637765884399414, -0.332930326461792, 0.49654901027679443, 0.7717220783233643, 0.807011067867279, -0.030028698965907097, -0.8625316619873047, 0.3455996811389923, -0.5094425082206726, -0.16918304562568665, 0.3273372948169708, -0.8940423130989075, 0.987058162689209, -0.19161829352378845, 0.004963892512023449, -0.1098005473613739, 0.4615534543991089, 0.6186342835426331, 0.3576292395591736, 0.43853771686553955, 0.719257116317749, 0.703434944152832, -0.36629241704940796, 1.016388177871704, -0.35180866718292236, 0.8466054797172546, 1.020510196685791, 0.08215483278036118, 0.742253839969635, 0.35935744643211365, -0.4955763518810272, 0.5246413350105286, 0.9020386934280396, -0.3677833676338196, 0.49842947721481323, 0.07571917772293091, -0.03425043076276779, -0.0950961709022522, 0.02353290095925331, -0.509475827217102, 0.4016156494617462, 0.14016063511371613, -0.5637558102607727, -0.29273587465286255, -0.3010863661766052, 0.18137481808662415, -0.2739163339138031, -0.20591461658477783, 0.5186671018600464, -0.023316679522395134, -0.43045055866241455, 0.7666774392127991, -0.04086150974035263, 0.7520678043365479, -0.6081323623657227, -0.021334119141101837, -0.31104427576065063, 0.2732515037059784, -0.5528578758239746, -0.9629914164543152, 0.35389062762260437, 0.10425298660993576, -0.2868078649044037, -0.2122524082660675, 0.5689185261726379, -0.23388557136058807, -0.7073538899421692, 0.342673659324646, 0.4570694863796234, 0.2472461462020874, 0.04085146263241768, -0.9786846041679382, 0.26676827669143677, 0.29201820492744446, -0.815697193145752, 0.3549455404281616, 0.25476765632629395, 0.14133885502815247, 0.5404367446899414, 0.7933926582336426, 0.08917634934186935, 0.12196122109889984, -0.05598123371601105, 1.1182461977005005, -0.8743074536323547, -0.4197161793708801, -0.8437554240226746, 0.7949355840682983, -0.251889169216156, -0.6297943592071533, 0.8087722659111023, 0.993259072303772, 0.7627261281013489, 0.09755726903676987, 0.8050708770751953, -0.452970027923584, 0.48133420944213867, -0.37512707710266113, 0.856381356716156, -0.7053780555725098, 0.31853801012039185, -0.20878447592258453, -0.8944127559661865, -0.04182986915111542, 0.705020010471344, -0.23575687408447266, -0.061452899128198624, 0.6184384226799011, 1.0177972316741943, 0.00524155842140317, 0.23943358659744263, -0.10483184456825256, 
0.42052602767944336, 0.35517415404319763, 0.6032298803329468, 0.6386937499046326, -0.6583122611045837, 0.4648493826389313, -0.7297803163528442, -0.40711838006973267, -0.17652341723442078, -0.7697849869728088, -0.8272660374641418, -0.5161612033843994, -0.3960307836532593, -0.5210072994232178, 0.017407167702913284, 1.0827714204788208, 0.530265748500824, -0.9023146033287048, -0.42424315214157104, -0.06821898370981216, 0.16760693490505219, -0.3098065257072449, -0.35555997490882874, 0.646182656288147, -0.14822238683700562, -0.7554781436920166, 0.35036736726760864, -0.15545396506786346, -0.12720847129821777, -0.04068399593234062, -0.21448451280593872, -0.3744427561759949, -0.3414025604724884, 0.41127926111221313, 0.14580361545085907, -0.6259867548942566, -0.30388137698173523, -0.03835189715027809, 0.011085371486842632, 0.33170264959335327, 0.2911241054534912, -0.5861291885375977, -0.12456046789884567, 0.5237072706222534, 0.35748979449272156, 0.686589777469635, 0.10724429041147232, 0.20780716836452484, -0.8039851784706116, -0.021762430667877197, 0.0034505717922002077, 0.49747756123542786, 0.23744451999664307, -0.506259560585022, 1.00799560546875, 0.3631734549999237, -0.6896491050720215, -0.924367368221283, -0.29392209649086, -1.1916388273239136, 0.005053871311247349, 1.457838773727417, -0.19551506638526917, -0.4117490351200104, 0.138021782040596, -0.13044480979442596, 0.3027586340904236, -0.8121005296707153, 0.5722470283508301, 0.7716102600097656, -0.3094887137413025, -0.017170704901218414, -0.6178132891654968, 0.35565993189811707, 0.016221627593040466, -0.9436551928520203, 0.07979471981525421, 0.31158217787742615, 0.3826206922531128, 0.22123323380947113, 0.6277053952217102, -0.032015327364206314, -0.13717235624790192, -0.06703948229551315, 0.1388295739889145, -0.3764495253562927, -0.14896003901958466, -0.15845654904842377, 0.10690728574991226, -0.4738908112049103, -0.532197117805481 ]
open-llm-leaderboard/details_Brillibits__Instruct_Llama70B_Dolly15k
open-llm-leaderboard
2023-12-01T14:40:41Z
201
0
[ "region:us" ]
null
2023-09-16T22:45:39Z
--- pretty_name: Evaluation run of Brillibits/Instruct_Llama70B_Dolly15k dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Brillibits/Instruct_Llama70B_Dolly15k](https://huggingface.co/Brillibits/Instruct_Llama70B_Dolly15k)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 3 configurations, each one corresponding to one of the\ \ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run. The \"train\" split always points to the latest results.\n\ \nAn additional configuration \"results\" stores all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Brillibits__Instruct_Llama70B_Dolly15k_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-07T07:12:49.365073](https://huggingface.co/datasets/open-llm-leaderboard/details_Brillibits__Instruct_Llama70B_Dolly15k_public/blob/main/results_2023-11-07T07-12-49.365073.json) (note\ \ that there might be results for other tasks in the repo if successive evals didn't\ \ cover the same tasks. You can find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n    \"all\": {\n        \"em\": 0.2294463087248322,\n\ \        \"em_stderr\": 0.004306075513502917,\n        \"f1\": 0.2826310822147651,\n\ \        \"f1_stderr\": 0.004256290262260348,\n        \"acc\": 0.6348872917405918,\n\ \        \"acc_stderr\": 0.01192527682309685\n    },\n    \"harness|drop|3\": {\n\ \        \"em\": 0.2294463087248322,\n        \"em_stderr\": 0.004306075513502917,\n\ \        \"f1\": 0.2826310822147651,\n        \"f1_stderr\": 0.004256290262260348\n\ \    },\n    \"harness|gsm8k|5\": {\n        \"acc\": 0.4268385140257771,\n     \ \    \"acc_stderr\": 0.013624249696595222\n    },\n    \"harness|winogrande|5\"\ : {\n        \"acc\": 0.8429360694554064,\n        \"acc_stderr\": 0.010226303949598477\n\ \    }\n}\n```" repo_url: https://huggingface.co/Brillibits/Instruct_Llama70B_Dolly15k leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_drop_3 data_files: - split: 2023_11_07T07_12_49.365073 path: - '**/details_harness|drop|3_2023-11-07T07-12-49.365073.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-07T07-12-49.365073.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_07T07_12_49.365073 path: - '**/details_harness|gsm8k|5_2023-11-07T07-12-49.365073.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-07T07-12-49.365073.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_07T07_12_49.365073 path: - '**/details_harness|winogrande|5_2023-11-07T07-12-49.365073.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-07T07-12-49.365073.parquet' - config_name: results data_files: - split: 2023_11_07T07_12_49.365073 path: - results_2023-11-07T07-12-49.365073.parquet - split: latest path: - results_2023-11-07T07-12-49.365073.parquet --- # Dataset Card for Evaluation run of Brillibits/Instruct_Llama70B_Dolly15k ## Dataset Description - **Homepage:** - 
**Repository:** https://huggingface.co/Brillibits/Instruct_Llama70B_Dolly15k
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Brillibits/Instruct_Llama70B_Dolly15k](https://huggingface.co/Brillibits/Instruct_Llama70B_Dolly15k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Brillibits__Instruct_Llama70B_Dolly15k_public",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-11-07T07:12:49.365073](https://huggingface.co/datasets/open-llm-leaderboard/details_Brillibits__Instruct_Llama70B_Dolly15k_public/blob/main/results_2023-11-07T07-12-49.365073.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.2294463087248322,
        "em_stderr": 0.004306075513502917,
        "f1": 0.2826310822147651,
        "f1_stderr": 0.004256290262260348,
        "acc": 0.6348872917405918,
        "acc_stderr": 0.01192527682309685
    },
    "harness|drop|3": {
        "em": 0.2294463087248322,
        "em_stderr": 0.004306075513502917,
        "f1": 0.2826310822147651,
        "f1_stderr": 0.004256290262260348
    },
    "harness|gsm8k|5": {
        "acc": 0.4268385140257771,
        "acc_stderr": 0.013624249696595222
    },
    "harness|winogrande|5": {
        "acc": 0.8429360694554064,
        "acc_stderr": 0.010226303949598477
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
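The card above explains the layout of this dataset: one configuration per evaluated task, a "results" configuration with the aggregated metrics, and a "latest" split aliasing the newest run. Below is a minimal sketch of how that layout can be explored. It assumes only that the `datasets` library is installed and that the configuration and split names listed in this card are still current; the exact columns of the rows are not guaranteed here, so check them on load.

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_Brillibits__Instruct_Llama70B_Dolly15k_public"

# One configuration per evaluated task (drop, gsm8k, winogrande), plus "results".
print(get_dataset_config_names(REPO))

# "latest" always aliases the most recent timestamped run of a configuration.
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")
print(winogrande.column_names)  # per-example details for the winogrande task

# The "results" configuration stores the aggregated metrics of the run.
results = load_dataset(REPO, "results", split="latest")
print(results[0])  # first record of the aggregated results for the latest run
```

Pinning to the timestamped split instead of `latest` (for example `split="2023_11_07T07_12_49.365073"`) keeps an analysis tied to this specific run even if newer evaluations are added later.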
[ -0.3422517478466034, -0.6968674659729004, 0.16448785364627838, 0.3386775553226471, -0.08930301666259766, 0.15331177413463593, -0.32834354043006897, -0.15053147077560425, 0.4296543300151825, 0.6234481930732727, -0.7331606149673462, -0.9642496109008789, -0.6304399371147156, 0.243013396859169, -0.17529821395874023, 1.0715821981430054, -0.2728478014469147, -0.22185029089450836, -0.10516129434108734, -0.2634913921356201, -0.4399455785751343, -0.39731770753860474, -0.44055628776550293, -0.47243833541870117, 0.41635844111442566, 0.6387722492218018, 0.35685625672340393, 0.6651535034179688, 0.6857439875602722, 0.390666663646698, -0.22273379564285278, 0.21524208784103394, -0.526837170124054, -0.07255841046571732, 0.20052020251750946, -0.6310173869132996, -0.8244364261627197, 0.07616647332906723, 0.6583573222160339, 0.4915580451488495, -0.20410364866256714, 0.622624397277832, 0.11065131425857544, 0.6152650117874146, -0.5024825930595398, 0.28785794973373413, -0.37698277831077576, 0.019769294187426567, -0.3222688138484955, -0.16381116211414337, -0.12254749238491058, -0.3135022819042206, -0.20882441103458405, -0.4601437747478485, 0.18120765686035156, 0.13354818522930145, 1.1563502550125122, 0.047551825642585754, -0.15267746150493622, -0.2639642059803009, -0.3406144976615906, 0.9402716755867004, -0.8178835511207581, 0.05333492532372475, 0.7044143080711365, 0.10832980275154114, -0.2770191729068756, -0.5617130398750305, -0.4036842882633209, -0.13654831051826477, -0.2922542989253998, 0.2708984911441803, 0.015357939526438713, -0.10880322009325027, 0.4476199448108673, 0.6679230332374573, -0.7288509607315063, 0.03577320650219917, -0.6078423261642456, -0.18645218014717102, 1.045425295829773, 0.40004098415374756, 0.009479056112468243, -0.5620854496955872, -0.48523345589637756, -0.23563261330127716, -0.4512469172477722, 0.24644115567207336, 0.5036678314208984, 0.5070432424545288, -0.7244849801063538, 0.835079550743103, -0.49916961789131165, 0.6202386021614075, -0.12504740059375763, -0.28397274017333984, 0.8461580276489258, -0.5524831414222717, -0.27341434359550476, -0.03474648669362068, 0.9811933636665344, 0.3978292942047119, -0.047943707555532455, 0.16265563666820526, -0.34029892086982727, -0.12719222903251648, 0.10086195170879364, -0.7274518609046936, -0.08331936597824097, 0.468909353017807, -0.5700873136520386, -0.3935025930404663, 0.22784523665905, -0.9139266610145569, -0.16944104433059692, -0.35972434282302856, 0.2595095932483673, -0.12021572142839432, -0.2577818036079407, -0.05925191938877106, -0.02565792202949524, 0.3155992031097412, 0.21492598950862885, -0.5883904695510864, 0.29349464178085327, 0.5959901213645935, 1.0355514287948608, -0.11219832301139832, -0.3083989918231964, -0.39418187737464905, -0.18474112451076508, -0.1316160410642624, 0.46924713253974915, -0.1908363550901413, -0.4986509680747986, -0.06469883024692535, 0.3518184423446655, -0.27724984288215637, -0.6577460169792175, 0.7250972390174866, -0.2511390149593353, 0.19739338755607605, -0.2965282201766968, -0.3993285298347473, -0.10808855295181274, 0.44088977575302124, -0.6399840712547302, 1.4891400337219238, 0.36284711956977844, -0.7300122380256653, 0.08844531327486038, -0.7613469362258911, -0.2222103625535965, 0.0759638100862503, 0.060587503015995026, -0.6545034050941467, -0.2072419822216034, 0.14455513656139374, 0.6473886966705322, -0.32407552003860474, -0.014668203890323639, -0.2614559233188629, -0.27700161933898926, 0.1557198017835617, -0.03779035434126854, 1.085115909576416, 0.23144836723804474, -0.26893389225006104, 0.11265141516923904, 
-1.0342046022415161, 0.03050728514790535, 0.29729220271110535, -0.6671883463859558, -0.1359441727399826, -0.3970564007759094, 0.0915328711271286, 0.08249146491289139, 0.5701711177825928, -0.6084802150726318, 0.340337872505188, -0.13669586181640625, 0.13253632187843323, 0.9822044968605042, -0.04964573308825493, 0.306650310754776, -0.5213510394096375, 0.6050567030906677, 0.03445540368556976, 0.3231430649757385, 0.04260265827178955, -0.6138607263565063, -0.8258346319198608, -0.24580354988574982, 0.18912996351718903, 0.7473323941230774, -0.4465283453464508, 0.7350026965141296, -0.38127121329307556, -0.6726415157318115, -0.7449733018875122, 0.24344022572040558, 0.4728924334049225, 0.5731950402259827, 0.4015079140663147, -0.351250022649765, -0.7112218737602234, -0.97496497631073, 0.030576836317777634, -0.09124259650707245, -0.06256872415542603, 0.49631357192993164, 1.0055381059646606, -0.3257805407047272, 0.6950286626815796, -0.6776256561279297, -0.3240736722946167, -0.35362380743026733, 0.07102272659540176, 0.7931147217750549, 0.4719248414039612, 0.4157608449459076, -0.6494917273521423, -0.3583005964756012, 0.062446191906929016, -0.7925419807434082, -0.238447904586792, -0.17295046150684357, -0.3390846252441406, 0.3263527452945709, -0.12183894217014313, -0.565761148929596, 0.6281349658966064, 0.5357761979103088, -0.5773575305938721, 0.6218821406364441, -0.031218035146594048, 0.3339715003967285, -1.2276452779769897, 0.1209358423948288, 0.03087102435529232, -0.029119295999407768, -0.42134368419647217, -0.19176693260669708, 0.006296040024608374, 0.33644768595695496, -0.3666805922985077, 0.6597615480422974, -0.45627039670944214, -0.23562419414520264, 0.09290061891078949, 0.22697804868221283, -0.0514911524951458, 0.4946709871292114, -0.32846835255622864, 0.6502963304519653, 0.4472774267196655, -0.3899892270565033, 0.5270630121231079, 0.5569055080413818, -0.5283078551292419, 0.3394900858402252, -0.47616854310035706, 0.019969431683421135, 0.1299067735671997, 0.10121579468250275, -0.8605800867080688, -0.37176233530044556, 0.36693698167800903, -0.5786442756652832, 0.24179716408252716, -0.3840292692184448, -0.46056243777275085, -0.47310903668403625, -0.46381664276123047, 0.06850433349609375, 0.49386224150657654, -0.4752924144268036, 0.36020156741142273, 0.3648608326911926, 0.010187157429754734, -0.74381023645401, -0.755467414855957, -0.1561809927225113, -0.30301976203918457, -0.7453663349151611, 0.37303754687309265, -0.10366867482662201, -0.26435643434524536, -0.053208786994218826, -0.06932435184717178, -0.007774427067488432, 0.10075758397579193, 0.3409687280654907, 0.5241442918777466, -0.06904711574316025, -0.33928442001342773, -0.28592413663864136, -0.11384091526269913, 0.08764936029911041, 0.25800323486328125, 0.6055656671524048, -0.23273751139640808, -0.25527167320251465, -0.22202536463737488, 0.07390207052230835, 0.3663204610347748, -0.17065075039863586, 0.8657438158988953, 0.7272236943244934, -0.22096343338489532, -0.11238894611597061, -0.4144374430179596, -0.01676945760846138, -0.4915526211261749, 0.2961811125278473, -0.2823094129562378, -0.7683466076850891, 0.8897805213928223, 0.20997384190559387, 0.16501812636852264, 0.5453110933303833, 0.6928722858428955, 0.022430945187807083, 0.5761300325393677, 0.18559390306472778, -0.09628499299287796, 0.5406578779220581, -0.8629841208457947, -0.028827577829360962, -1.0945117473602295, -0.4701423943042755, -0.51148521900177, -0.43895280361175537, -0.832775890827179, -0.42791104316711426, 0.25234439969062805, 0.1792905181646347, -0.34327322244644165, 
0.5048848986625671, -0.574554443359375, 0.19888511300086975, 0.6531968712806702, 0.24758118391036987, 0.05670713633298874, -0.06875360757112503, -0.09388253837823868, 0.32781490683555603, -0.529773473739624, -0.45944342017173767, 1.3710589408874512, 0.28100112080574036, 0.6897211670875549, 0.00971127301454544, 1.017791509628296, 0.3119906187057495, 0.3441632091999054, -0.5560981631278992, 0.678855836391449, 0.07276908308267593, -0.5507605671882629, -0.18410520255565643, -0.5835058689117432, -1.051624059677124, 0.17913757264614105, -0.13223698735237122, -0.9920245409011841, 0.10385993868112564, 0.046610478311777115, -0.047511518001556396, 0.30427515506744385, -0.5539440512657166, 0.8407647013664246, -0.20091520249843597, -0.5796909332275391, 0.09030888974666595, -0.7960063815116882, 0.43192148208618164, 0.1807718276977539, 0.40274110436439514, -0.3124065101146698, 0.11153443157672882, 1.0820847749710083, -0.5850973129272461, 0.7704480886459351, -0.18708549439907074, 0.0992676392197609, 0.4205857515335083, -0.26807886362075806, 0.6409996151924133, 0.00002269131800858304, -0.20664389431476593, 0.4456872344017029, -0.20972643792629242, -0.3893180191516876, -0.28581610321998596, 0.8650845289230347, -0.8341957330703735, -0.37415769696235657, -0.41818591952323914, -0.6404764652252197, 0.23751507699489594, 0.17246702313423157, 0.24927790462970734, 0.3271488547325134, 0.12527021765708923, 0.30187931656837463, 0.29597944021224976, -0.21600742638111115, 0.5224277377128601, 0.3860325217247009, -0.253073513507843, -0.6917718052864075, 0.7106701135635376, 0.24264292418956757, 0.12976039946079254, 0.32594457268714905, 0.08097581565380096, -0.4865790009498596, -0.472468763589859, -0.35727426409721375, 0.3007085621356964, -0.5758091807365417, -0.38743317127227783, -0.4132521450519562, -0.2177218347787857, -0.38758450746536255, -0.11481092125177383, -0.43557026982307434, -0.4496784806251526, -0.3792985677719116, -0.320121705532074, 0.7005221843719482, 0.6307526230812073, -0.3772927224636078, 0.36968061327934265, -0.7888045907020569, 0.1883508861064911, -0.11991748213768005, 0.3586384356021881, -0.17355391383171082, -0.6040717363357544, -0.542820155620575, 0.12540970742702484, -0.5198302268981934, -0.8442163467407227, 0.625392735004425, -0.14207158982753754, 0.6729657053947449, 0.1243169903755188, 0.13118082284927368, 0.7592586874961853, -0.12683403491973877, 1.0089921951293945, -0.06161128357052803, -0.6673483848571777, 0.7398991584777832, -0.3273264467716217, 0.12524479627609253, 0.4068971276283264, 0.24194414913654327, -0.4772481620311737, -0.18603363633155823, -0.8510897159576416, -1.1444774866104126, 1.002551794052124, 0.6502987146377563, -0.2847095727920532, 0.1953589767217636, 0.2897014617919922, -0.10525573045015335, 0.15309564769268036, -0.6929357647895813, -0.6880900263786316, -0.030284980311989784, -0.33877864480018616, -0.021671505644917488, -0.10350273549556732, -0.4864024221897125, -0.38457605242729187, 0.8484683036804199, 0.03777455911040306, 0.5093548893928528, 0.20279806852340698, -0.07798104733228683, -0.12207023054361343, 0.27092573046684265, 0.5351301431655884, 0.7423287630081177, -0.45031148195266724, -0.1419769674539566, 0.36644473671913147, -0.6523529887199402, -0.0057258871383965015, 0.2594986855983734, -0.11388478428125381, -0.0657721534371376, 0.725771963596344, 0.9823072552680969, 0.0483618900179863, -0.3887983560562134, 0.46341630816459656, 0.088014617562294, -0.2780769169330597, -0.5139140486717224, 0.08449234813451767, -0.021317223086953163, 0.37207749485969543, 
0.37325164675712585, -0.1678960621356964, -0.08133690059185028, -0.3029371201992035, 0.24528130888938904, 0.33765727281570435, -0.09918322414159775, -0.35530713200569153, 0.5577611923217773, 0.02232385240495205, -0.4474833309650421, 0.6819435954093933, -0.06651224941015244, -0.49940943717956543, 1.0909397602081299, 0.26190438866615295, 0.8718757629394531, -0.2056858092546463, 0.129196897149086, 0.6249961853027344, 0.41583380103111267, -0.032228972762823105, 0.6823787093162537, 0.11259859055280685, -0.6938527226448059, -0.23040376603603363, -0.9531636834144592, -0.2638985812664032, 0.3271598219871521, -1.05064857006073, 0.4655812680721283, -0.1354111284017563, -0.18936480581760406, -0.19512894749641418, 0.37807726860046387, -0.9278025031089783, 0.18099172413349152, -0.018287023529410362, 0.8851125836372375, -0.9755065441131592, 0.573431670665741, 0.8342909812927246, -0.5100669860839844, -0.8147054314613342, -0.31939980387687683, 0.07294710725545883, -0.9243212342262268, 0.49938058853149414, 0.26265284419059753, 0.4248816668987274, -0.2255629450082779, -0.6374544501304626, -0.9999220371246338, 1.6088200807571411, 0.07866829633712769, -0.5716105103492737, 0.26021483540534973, 0.2248772382736206, 0.3112746477127075, -0.35033339262008667, 0.5955317616462708, 0.6958271265029907, 0.7137448191642761, 0.027922065928578377, -0.9673329591751099, 0.22589612007141113, -0.34476301074028015, -0.07831107079982758, 0.22720597684383392, -0.9160654544830322, 1.0281867980957031, -0.20877555012702942, 0.02681032195687294, -0.052313074469566345, 0.41945844888687134, 0.5684796571731567, 0.439260870218277, 0.38520804047584534, 0.7653053402900696, 0.6771480441093445, -0.37283146381378174, 1.0781795978546143, -0.25913894176483154, 0.9039328694343567, 0.9894692897796631, -0.004469035658985376, 0.7574043869972229, 0.3254821300506592, -0.5808165073394775, 0.5271716713905334, 0.7895967960357666, -0.3184216320514679, 0.3855135142803192, 0.18107160925865173, -0.11930831521749496, -0.07620710879564285, 0.0496918186545372, -0.39521148800849915, 0.4754364788532257, 0.22321799397468567, -0.559587836265564, -0.1460752934217453, -0.2827278971672058, 0.1561320275068283, -0.23814408481121063, -0.2362784892320633, 0.5793130397796631, -0.027848392724990845, -0.3984774649143219, 0.7881081700325012, -0.12598243355751038, 0.6286411285400391, -0.647948682308197, -0.15717187523841858, -0.36334455013275146, 0.26463863253593445, -0.5725416541099548, -1.0125213861465454, 0.18167808651924133, 0.04538856819272041, -0.21065349876880646, -0.2678910493850708, 0.6066962480545044, -0.3276764154434204, -0.6520103812217712, 0.4338361918926239, 0.37844958901405334, 0.30414095520973206, 0.2831808924674988, -0.8602828979492188, 0.2878122925758362, 0.3084326982498169, -0.848407506942749, 0.39232054352760315, 0.30907487869262695, 0.12363985180854797, 0.6372599601745605, 0.7161688208580017, 0.07780858874320984, 0.17428214848041534, -0.12679722905158997, 1.1097698211669922, -0.8113794326782227, -0.28373199701309204, -0.7827043533325195, 1.0160465240478516, -0.32742348313331604, -0.5945444703102112, 0.8681146502494812, 1.0123544931411743, 0.9480951428413391, 0.02293357253074646, 0.8228325843811035, -0.4903373718261719, 0.5589473843574524, -0.5053830742835999, 0.7848684787750244, -0.6953391432762146, 0.376154363155365, -0.1789427548646927, -0.8143331408500671, -0.06819076836109161, 0.6897390484809875, -0.25449395179748535, 0.021181585267186165, 0.4900239408016205, 1.1151231527328491, 0.04560727998614311, 0.06273346394300461, 0.06857211887836456, 
0.5052573680877686, 0.2535459101200104, 0.5026794672012329, 0.651594877243042, -0.6446512937545776, 0.41614577174186707, -0.7133660316467285, -0.44574427604675293, -0.22653616964817047, -0.6921626925468445, -0.8796690106391907, -0.515640914440155, -0.26993316411972046, -0.5465770959854126, -0.02529301680624485, 1.020498514175415, 0.4295573830604553, -0.885308027267456, -0.5215749144554138, 0.07783497869968414, 0.3301894962787628, -0.1767466515302658, -0.35061970353126526, 0.530860185623169, -0.10921581834554672, -0.8011110424995422, 0.3354937434196472, -0.111903116106987, -0.15940718352794647, -0.03876848146319389, -0.27626752853393555, -0.4314599931240082, -0.2816488742828369, 0.5426284670829773, 0.1391642838716507, -0.6436514854431152, -0.29282036423683167, -0.08589132130146027, 0.06002995744347572, 0.3028841018676758, 0.3018970787525177, -0.5584009289741516, 0.143394336104393, 0.6079457402229309, 0.2031029313802719, 0.7019951939582825, 0.10211235284805298, 0.23273965716362, -0.8120207786560059, 0.03829667344689369, 0.07481423765420914, 0.5354689359664917, 0.22027738392353058, -0.4829612374305725, 0.9923278093338013, 0.41109058260917664, -0.8002483248710632, -1.016288161277771, -0.1548662632703781, -1.2764743566513062, 0.13607414066791534, 1.4415040016174316, -0.28530216217041016, -0.42825815081596375, 0.06848005950450897, -0.21487371623516083, 0.2909287214279175, -0.6851131319999695, 0.56819087266922, 0.6851629614830017, -0.37430718541145325, -0.08671015501022339, -0.7185026407241821, 0.2951836585998535, 0.005924935918301344, -0.9304815530776978, -0.008230666629970074, 0.2851969599723816, 0.3505498170852661, 0.2415451556444168, 0.5427377223968506, 0.06255465745925903, -0.2014968991279602, -0.010317491367459297, 0.3402327597141266, -0.1536272168159485, -0.0625123456120491, -0.14550070464611053, 0.07780766487121582, -0.42169123888015747, -0.5068849921226501 ]
open-llm-leaderboard/details_Undi95__MLewd-L2-Chat-13B
open-llm-leaderboard
2023-12-01T14:12:01Z
201
0
[ "region:us" ]
null
2023-09-18T13:38:52Z
--- pretty_name: Evaluation run of Undi95/MLewd-L2-Chat-13B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Undi95/MLewd-L2-Chat-13B](https://huggingface.co/Undi95/MLewd-L2-Chat-13B) on\ \ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 3 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__MLewd-L2-Chat-13B_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-07T04:02:20.497765](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MLewd-L2-Chat-13B_public/blob/main/results_2023-11-07T04-02-20.497765.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.039953859060402684,\n\ \ \"em_stderr\": 0.0020056958276819816,\n \"f1\": 0.12528313758389248,\n\ \ \"f1_stderr\": 0.0025138994037981494,\n \"acc\": 0.44361714795535834,\n\ \ \"acc_stderr\": 0.010234482644867801\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.039953859060402684,\n \"em_stderr\": 0.0020056958276819816,\n\ \ \"f1\": 0.12528313758389248,\n \"f1_stderr\": 0.0025138994037981494\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11296436694465505,\n \ \ \"acc_stderr\": 0.008719339028833055\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7742699289660616,\n \"acc_stderr\": 0.011749626260902545\n\ \ }\n}\n```" repo_url: https://huggingface.co/Undi95/MLewd-L2-Chat-13B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_drop_3 data_files: - split: 2023_11_05T00_36_15.205012 path: - '**/details_harness|drop|3_2023-11-05T00-36-15.205012.parquet' - split: 2023_11_07T04_02_20.497765 path: - '**/details_harness|drop|3_2023-11-07T04-02-20.497765.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-07T04-02-20.497765.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_05T00_36_15.205012 path: - '**/details_harness|gsm8k|5_2023-11-05T00-36-15.205012.parquet' - split: 2023_11_07T04_02_20.497765 path: - '**/details_harness|gsm8k|5_2023-11-07T04-02-20.497765.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-07T04-02-20.497765.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_05T00_36_15.205012 path: - '**/details_harness|winogrande|5_2023-11-05T00-36-15.205012.parquet' - split: 2023_11_07T04_02_20.497765 path: - '**/details_harness|winogrande|5_2023-11-07T04-02-20.497765.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-07T04-02-20.497765.parquet' - config_name: results data_files: - split: 
2023_11_05T00_36_15.205012 path: - results_2023-11-05T00-36-15.205012.parquet - split: 2023_11_07T04_02_20.497765 path: - results_2023-11-07T04-02-20.497765.parquet - split: latest path: - results_2023-11-07T04-02-20.497765.parquet ---
# Dataset Card for Evaluation run of Undi95/MLewd-L2-Chat-13B

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Undi95/MLewd-L2-Chat-13B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Undi95/MLewd-L2-Chat-13B](https://huggingface.co/Undi95/MLewd-L2-Chat-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Undi95__MLewd-L2-Chat-13B_public",
	"harness_winogrande_5",
	split="train")
```

A short sketch showing how to compare the two timestamped runs is included right after this card.

## Latest results

These are the [latest results from run 2023-11-07T04:02:20.497765](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MLewd-L2-Chat-13B_public/blob/main/results_2023-11-07T04-02-20.497765.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.039953859060402684,
        "em_stderr": 0.0020056958276819816,
        "f1": 0.12528313758389248,
        "f1_stderr": 0.0025138994037981494,
        "acc": 0.44361714795535834,
        "acc_stderr": 0.010234482644867801
    },
    "harness|drop|3": {
        "em": 0.039953859060402684,
        "em_stderr": 0.0020056958276819816,
        "f1": 0.12528313758389248,
        "f1_stderr": 0.0025138994037981494
    },
    "harness|gsm8k|5": {
        "acc": 0.11296436694465505,
        "acc_stderr": 0.008719339028833055
    },
    "harness|winogrande|5": {
        "acc": 0.7742699289660616,
        "acc_stderr": 0.011749626260902545
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?
[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
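This dataset card lists two evaluation runs, each stored as a timestamped split (with `latest` aliasing the newer one). The sketch below loads both runs of the GSM8K task so their per-example records can be compared side by side; the split names are taken from the configuration list in the card above, and it assumes `datasets` and `pandas` are installed. Column contents are not documented in the card, so inspect them on load.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_Undi95__MLewd-L2-Chat-13B_public"

# Each run is a split named after its timestamp; "latest" points to the newest one.
run_2023_11_05 = load_dataset(REPO, "harness_gsm8k_5", split="2023_11_05T00_36_15.205012")
run_2023_11_07 = load_dataset(REPO, "harness_gsm8k_5", split="2023_11_07T04_02_20.497765")

# Convert to pandas for side-by-side inspection of the per-example records.
df_old = run_2023_11_05.to_pandas()
df_new = run_2023_11_07.to_pandas()
print(len(df_old), len(df_new), list(df_new.columns)[:5])
```

The same pattern applies to the `harness_drop_3` and `harness_winogrande_5` configurations, whose runs share the same two timestamps.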
[ -0.3956095278263092, -0.7253473997116089, 0.11999540776014328, 0.32498520612716675, -0.1416456550359726, 0.11238537728786469, -0.44564419984817505, -0.2363535761833191, 0.40280231833457947, 0.5567663311958313, -0.7619163393974304, -0.9017199277877808, -0.637273371219635, 0.08960219472646713, -0.18381883203983307, 1.1420364379882812, -0.10008973628282547, -0.18946939706802368, 0.05619945377111435, -0.2880464494228363, -0.4060154855251312, -0.49440422654151917, -0.5941420197486877, -0.4291117191314697, 0.399336576461792, 0.659658670425415, 0.3756713271141052, 0.5766859650611877, 0.6769211888313293, 0.3793984353542328, -0.13342943787574768, 0.27564260363578796, -0.5711698532104492, -0.09703756868839264, 0.3017403483390808, -0.5800062417984009, -0.757064938545227, 0.11318469047546387, 0.6120718717575073, 0.47159287333488464, -0.2064242660999298, 0.5678222179412842, 0.2828025817871094, 0.6625744104385376, -0.4214933514595032, 0.3806184232234955, -0.43354564905166626, -0.0920586884021759, -0.3377351462841034, -0.139804869890213, -0.12162754684686661, -0.2620621919631958, -0.24094781279563904, -0.41706958413124084, 0.03243894875049591, 0.0770142525434494, 0.9768639802932739, 0.12901727855205536, -0.17986233532428741, -0.2594716548919678, -0.36981400847435, 0.7921704649925232, -0.9708035588264465, 0.04613780230283737, 0.641802966594696, 0.11041340976953506, -0.36421999335289, -0.6322699189186096, -0.36367008090019226, -0.030960962176322937, -0.286192387342453, 0.10138394683599472, -0.032864831387996674, -0.10697443783283234, 0.37660688161849976, 0.5581100583076477, -0.6640180349349976, 0.20355623960494995, -0.6059333682060242, -0.1792677789926529, 1.0204546451568604, 0.33440035581588745, 0.06888078898191452, -0.4090487062931061, -0.3536511957645416, -0.2469763308763504, -0.4328575134277344, 0.2719387412071228, 0.44990068674087524, 0.4995949864387512, -0.7028095722198486, 0.8056331276893616, -0.3634706735610962, 0.40594226121902466, -0.041332315653562546, -0.26345980167388916, 0.8058031797409058, -0.5150563716888428, -0.22570230066776276, -0.04571588709950447, 1.1280962228775024, 0.5690182447433472, 0.06367452442646027, 0.24171239137649536, -0.19959348440170288, -0.1111719161272049, -0.025599947199225426, -0.8072490096092224, -0.19249014556407928, 0.4869562089443207, -0.6477363705635071, -0.4835677444934845, 0.15585516393184662, -0.8923101425170898, -0.23805896937847137, -0.223556250333786, 0.1522999107837677, -0.12937018275260925, -0.41833963990211487, -0.2077757567167282, -0.062020640820264816, 0.18636834621429443, 0.10111599415540695, -0.6857061386108398, 0.36224696040153503, 0.6446570754051208, 0.9767957329750061, -0.202426940202713, -0.35452085733413696, -0.2770090699195862, -0.3423464298248291, -0.20836518704891205, 0.475166916847229, -0.15947994589805603, -0.42427170276641846, -0.17034929990768433, 0.34023767709732056, -0.268010675907135, -0.5931802988052368, 0.7432286143302917, -0.2730882167816162, 0.2477467656135559, -0.19187361001968384, -0.5548850893974304, -0.06449849903583527, 0.3672778904438019, -0.6065377593040466, 1.5383594036102295, 0.27475443482398987, -0.915420413017273, 0.04703960940241814, -0.8556861877441406, -0.16469913721084595, -0.03788655996322632, -0.022857926785945892, -0.5359635353088379, -0.21754878759384155, 0.15612366795539856, 0.6347736716270447, -0.3009878993034363, 0.054597146809101105, -0.3641015887260437, -0.4223938286304474, 0.23227854073047638, -0.21394209563732147, 1.0985100269317627, 0.20335659384727478, -0.47159087657928467, 0.1605321615934372, 
-0.9805888533592224, 0.16335199773311615, 0.27799034118652344, -0.5624416470527649, -0.23139819502830505, -0.17995305359363556, 0.21473044157028198, 0.17875798046588898, 0.5300368666648865, -0.5757085084915161, 0.29225486516952515, -0.2924063801765442, 0.2748441994190216, 1.0098251104354858, 0.02411499246954918, 0.2944068908691406, -0.356410413980484, 0.3878675401210785, 0.0984513908624649, 0.335906445980072, 0.10891138017177582, -0.608931303024292, -0.8012639880180359, -0.20660965144634247, 0.14660362899303436, 0.7768215537071228, -0.45624637603759766, 0.8441556692123413, -0.3157171905040741, -0.7023274898529053, -0.7614294290542603, 0.20760352909564972, 0.4417448937892914, 0.43367666006088257, 0.3146548569202423, -0.22698697447776794, -0.7473130822181702, -0.9793447256088257, 0.06585465371608734, -0.18341124057769775, -0.0015734813641756773, 0.5650219321250916, 0.9229687452316284, -0.4222615659236908, 0.6360476016998291, -0.6584119200706482, -0.37366783618927, -0.3016636073589325, 0.005173601675778627, 0.7794989943504333, 0.5824673175811768, 0.43095505237579346, -0.6064634323120117, -0.4201366901397705, 0.013642866164445877, -0.8141120076179504, -0.24647065997123718, -0.15432381629943848, -0.29594650864601135, 0.3769911527633667, 0.09957873821258545, -0.5190866589546204, 0.5471518635749817, 0.6103492379188538, -0.5665905475616455, 0.6014431715011597, -0.08674625307321548, 0.3901427686214447, -1.2446224689483643, 0.11573387682437897, -0.06055150181055069, -0.0004438083269633353, -0.4846701920032501, -0.20071616768836975, 0.043135616928339005, 0.32047632336616516, -0.41424891352653503, 0.693978488445282, -0.45770958065986633, -0.23282553255558014, -0.08091366291046143, 0.19287382066249847, -0.22044433653354645, 0.6045487523078918, -0.33113566040992737, 0.7872843742370605, 0.5244995951652527, -0.3911033868789673, 0.47991278767585754, 0.4302510917186737, -0.5534677505493164, 0.22764283418655396, -0.5662157535552979, 0.10335317254066467, 0.19350209832191467, 0.1599893420934677, -0.9477148056030273, -0.28203117847442627, 0.45874273777008057, -0.6080619096755981, 0.22385555505752563, -0.27436375617980957, -0.5443478226661682, -0.527635931968689, -0.487013578414917, 0.12923403084278107, 0.5376372337341309, -0.41983166337013245, 0.16699159145355225, 0.475306898355484, 0.006815115921199322, -0.7172231674194336, -0.7255205512046814, -0.08094602823257446, -0.3265484869480133, -0.724136233329773, 0.2744569182395935, -0.23567987978458405, -0.30067816376686096, -0.03468798100948334, 0.00022834738774690777, -0.03525232523679733, 0.1622302234172821, 0.409058153629303, 0.49227410554885864, -0.13408564031124115, -0.371468186378479, -0.26003435254096985, -0.156031534075737, 0.15760062634944916, 0.20710276067256927, 0.6293992400169373, -0.1795293688774109, -0.3334655165672302, -0.3296980559825897, 0.22497007250785828, 0.4858534336090088, -0.04934791848063469, 0.9172327518463135, 0.7947874665260315, -0.2511368989944458, -0.06477844715118408, -0.4477941393852234, -0.08671630173921585, -0.5004956722259521, 0.3750607669353485, -0.2706442177295685, -0.8662424683570862, 0.8759121298789978, 0.2376551777124405, 0.24078375101089478, 0.644885778427124, 0.6821552515029907, -0.04598241299390793, 0.8397960066795349, 0.2939509451389313, -0.18937501311302185, 0.4948607087135315, -0.6987735629081726, -0.04184655472636223, -1.0973503589630127, -0.45485150814056396, -0.43929141759872437, -0.4720812439918518, -0.8817499279975891, -0.4267672598361969, 0.23458731174468994, 0.2242691069841385, -0.3977545499801636, 
0.44032812118530273, -0.6159767508506775, 0.30091023445129395, 0.6540172100067139, 0.20237256586551666, 0.022783761844038963, -0.11448439210653305, -0.09692502021789551, 0.13313964009284973, -0.45567476749420166, -0.4497936964035034, 1.3995933532714844, 0.33209285140037537, 0.6847493648529053, 0.005908752325922251, 0.9078495502471924, 0.38383474946022034, 0.3219618499279022, -0.5124279856681824, 0.6375620365142822, -0.02733614854514599, -0.5159000754356384, -0.23376087844371796, -0.5623480081558228, -0.9471513032913208, 0.2449248731136322, -0.06182713061571121, -1.0478928089141846, 0.08671987056732178, -0.055088333785533905, 0.015003114007413387, 0.33697620034217834, -0.5795173048973083, 0.8716443777084351, -0.22943444550037384, -0.2646758258342743, -0.0178273543715477, -0.8714879751205444, 0.5559630990028381, 0.08235123753547668, 0.42155060172080994, -0.35008615255355835, 0.09392346441745758, 1.1273829936981201, -0.6201574802398682, 0.8356280326843262, -0.19708578288555145, 0.04133569821715355, 0.33615368604660034, -0.3066090941429138, 0.6517152190208435, -0.03946474939584732, -0.1951451599597931, 0.3841952979564667, -0.1127111166715622, -0.2642184793949127, -0.3680940866470337, 0.9570333361625671, -0.9445691108703613, -0.31551724672317505, -0.3113242983818054, -0.5609743595123291, 0.1828429251909256, 0.18466924130916595, 0.3475077152252197, 0.33329859375953674, -0.04027998074889183, 0.33997130393981934, 0.27122625708580017, -0.18265549838542938, 0.5406074523925781, 0.4090869724750519, -0.2530628740787506, -0.6724271178245544, 0.5754498839378357, 0.1550695151090622, 0.15115965902805328, 0.19323383271694183, -0.015742680057883263, -0.5701971054077148, -0.34158119559288025, -0.36472731828689575, 0.2542428970336914, -0.6007290482521057, -0.3246750235557556, -0.47334542870521545, -0.38174331188201904, -0.42768552899360657, 0.06631949543952942, -0.48258715867996216, -0.4834004342556, -0.5129750370979309, -0.25366881489753723, 0.6146119236946106, 0.563635528087616, -0.44589167833328247, 0.2558507025241852, -0.7926300764083862, 0.22112444043159485, -0.1706690490245819, 0.36211174726486206, -0.09944037348031998, -0.5869274139404297, -0.39360421895980835, 0.3106001317501068, -0.39686673879623413, -0.8571696877479553, 0.5374122262001038, 0.051950424909591675, 0.6949954628944397, 0.2657778263092041, 0.1984904408454895, 0.7494961619377136, -0.27037402987480164, 1.0090972185134888, 0.022478142753243446, -0.6248349547386169, 0.7297176718711853, -0.22411003708839417, -0.004751914646476507, 0.4364006519317627, 0.16586127877235413, -0.5816068053245544, -0.3077686131000519, -0.9211432933807373, -1.059522032737732, 1.0357351303100586, 0.6428502202033997, -0.17350445687770844, 0.09124753624200821, 0.27676835656166077, -0.18537257611751556, 0.10095985233783722, -0.6817821264266968, -0.9421631693840027, -0.03563864156603813, -0.2972155511379242, -0.1112862154841423, -0.20041552186012268, -0.40826985239982605, -0.40564772486686707, 0.874221682548523, 0.029174504801630974, 0.5605910420417786, 0.12173470109701157, -0.00797461997717619, -0.004509869031608105, 0.32312676310539246, 0.5550424456596375, 0.6195711493492126, -0.40218743681907654, -0.03247851878404617, 0.30073973536491394, -0.5773318409919739, 0.07076506316661835, 0.2463119775056839, 0.03748491406440735, -0.06716790795326233, 0.5709636211395264, 1.0005866289138794, 0.1395064741373062, -0.4643543064594269, 0.5681918859481812, 0.03249170258641243, -0.30191609263420105, -0.4546050727367401, 0.1260187029838562, 0.07809635251760483, 0.4574315547943115, 
0.4420945346355438, -0.11507783830165863, -0.019544681534171104, -0.348766028881073, 0.3336654305458069, 0.3062761723995209, -0.156404048204422, -0.2778097093105316, 0.5980997085571289, 0.04990142211318016, -0.41253116726875305, 0.7751598358154297, -0.12089573591947556, -0.542924165725708, 1.056408166885376, 0.38658419251441956, 0.8275918960571289, -0.09885291755199432, 0.11649801582098007, 0.5581983327865601, 0.36539286375045776, -0.06969906389713287, 0.598116397857666, 0.13149622082710266, -0.762647807598114, -0.2959652841091156, -0.6840866208076477, -0.20254315435886383, 0.40170520544052124, -1.0239671468734741, 0.34828993678092957, -0.08374079316854477, -0.26078417897224426, -0.1509062498807907, 0.3740433156490326, -0.762878954410553, 0.047161318361759186, 0.04532434418797493, 0.9107899069786072, -0.9356883764266968, 0.6932002902030945, 0.718077540397644, -0.3469778597354889, -0.9137751460075378, -0.42844927310943604, 0.1645471602678299, -0.9466568231582642, 0.4775189161300659, 0.2442256212234497, 0.47102686762809753, -0.1428288221359253, -0.666391909122467, -0.9719398617744446, 1.4902297258377075, 0.06456775218248367, -0.6049185991287231, 0.20217590034008026, 0.16623927652835846, 0.3882984519004822, -0.35950684547424316, 0.617530882358551, 0.744611382484436, 0.6423678398132324, 0.014287303201854229, -1.0327208042144775, 0.22782965004444122, -0.5422722697257996, -0.1354496031999588, 0.2814303934574127, -0.8167732954025269, 0.9383692145347595, -0.2696794867515564, -0.04141082987189293, -0.025444872677326202, 0.4674929976463318, 0.5508021116256714, 0.3285655975341797, 0.43873298168182373, 0.6420000791549683, 0.719574511051178, -0.30408427119255066, 0.8985092639923096, -0.36308640241622925, 0.8881350755691528, 1.137315034866333, 0.03184761852025986, 0.830276370048523, 0.2981214225292206, -0.4834981858730316, 0.4956815242767334, 0.8440236449241638, -0.3050724267959595, 0.4371708035469055, 0.09585639834403992, -0.0272244680672884, -0.011983221396803856, 0.07739006727933884, -0.4874963164329529, 0.40436631441116333, 0.2947932183742523, -0.5537115931510925, -0.1612529307603836, -0.24679675698280334, 0.1722000539302826, -0.3190937042236328, -0.19388067722320557, 0.7028643488883972, 0.05855157598853111, -0.4371723234653473, 0.7586507797241211, -0.02432260662317276, 0.7924667596817017, -0.5636860728263855, -0.08217649906873703, -0.34433117508888245, 0.3203241229057312, -0.49797773361206055, -1.0701712369918823, 0.25294730067253113, 0.10929547995328903, -0.19322316348552704, -0.15922516584396362, 0.6422134637832642, -0.389909952878952, -0.5709469318389893, 0.45807135105133057, 0.41326895356178284, 0.39647966623306274, 0.10756482183933258, -0.9921100735664368, 0.22822631895542145, 0.3686988055706024, -0.7589301466941833, 0.42137929797172546, 0.3173399865627289, 0.09647708386182785, 0.6446653008460999, 0.8065654039382935, 0.08610762655735016, 0.08663983643054962, -0.017872169613838196, 1.0914256572723389, -0.7471849322319031, -0.3541713356971741, -0.8715357780456543, 0.8989824652671814, -0.31661248207092285, -0.5714962482452393, 0.826278805732727, 0.9360674023628235, 0.7406570315361023, 0.14166571199893951, 0.7093700170516968, -0.46623140573501587, 0.6001802086830139, -0.399033784866333, 0.8779311776161194, -0.6239997744560242, 0.3913402557373047, -0.17924636602401733, -0.8344810009002686, -0.00813943799585104, 0.6620236039161682, -0.0601419173181057, -0.0717451423406601, 0.5469944477081299, 1.0989375114440918, 0.07683774828910828, 0.07723501324653625, 0.03539742901921272, 0.48245733976364136, 
0.30595603585243225, 0.572860062122345, 0.6919717788696289, -0.6900116801261902, 0.47504547238349915, -0.6728619337081909, -0.4003692865371704, -0.15714706480503082, -0.6814813017845154, -0.9372140169143677, -0.6074402332305908, -0.2789103388786316, -0.5617126226425171, -0.09496889263391495, 1.0887106657028198, 0.49093547463417053, -0.8883350491523743, -0.42218369245529175, 0.0785546749830246, 0.22404998540878296, -0.14397317171096802, -0.36694473028182983, 0.5737183094024658, -0.11777384579181671, -0.7267075181007385, 0.3661613166332245, -0.12278479337692261, -0.09392615407705307, -0.030986063182353973, -0.2848566174507141, -0.3210286498069763, -0.2868618965148926, 0.5267848372459412, 0.16162285208702087, -0.7329155206680298, -0.3156067132949829, -0.11141858994960785, -0.10750263929367065, 0.25561705231666565, 0.3148098587989807, -0.471260130405426, -0.051165953278541565, 0.5219090580940247, 0.2290135771036148, 0.6737154722213745, 0.07678721100091934, 0.22168242931365967, -0.8095663189888, 0.09389032423496246, -0.052774377167224884, 0.4352285861968994, 0.18641407787799835, -0.45868590474128723, 0.9550582766532898, 0.2955523431301117, -0.7985061407089233, -0.9808089137077332, -0.28514766693115234, -1.1652923822402954, 0.010152618400752544, 1.5027631521224976, -0.2719053030014038, -0.23159752786159515, 0.04235854744911194, -0.3084513545036316, 0.26866692304611206, -0.7002560496330261, 0.48542049527168274, 0.6947867274284363, -0.33554592728614807, -0.15260334312915802, -0.6273756623268127, 0.4444541931152344, -0.0638849064707756, -0.997069776058197, 0.04192082956433296, 0.32990407943725586, 0.42992040514945984, 0.10857925564050674, 0.785678505897522, 0.06579682976007462, -0.19518083333969116, -0.06259976327419281, 0.2151736170053482, -0.11242704838514328, -0.07058693468570709, -0.1273045837879181, -0.01806473359465599, -0.3659861087799072, -0.5201200246810913 ]
open-llm-leaderboard/details_Undi95__Emerald-13B
open-llm-leaderboard
2023-10-23T18:28:04Z
201
0
[ "region:us" ]
null
2023-10-03T17:31:47Z
--- pretty_name: Evaluation run of Undi95/Emerald-13B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Undi95/Emerald-13B](https://huggingface.co/Undi95/Emerald-13B) on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__Emerald-13B\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-23T18:27:52.311274](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Emerald-13B/blob/main/results_2023-10-23T18-27-52.311274.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.11566694630872483,\n\ \ \"em_stderr\": 0.0032753085227622833,\n \"f1\": 0.18378460570469723,\n\ \ \"f1_stderr\": 0.003376754461365903,\n \"acc\": 0.4437006222575401,\n\ \ \"acc_stderr\": 0.010610978881102105\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.11566694630872483,\n \"em_stderr\": 0.0032753085227622833,\n\ \ \"f1\": 0.18378460570469723,\n \"f1_stderr\": 0.003376754461365903\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1281273692191054,\n \ \ \"acc_stderr\": 0.009206398549980031\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7592738752959748,\n \"acc_stderr\": 0.012015559212224176\n\ \ }\n}\n```" repo_url: https://huggingface.co/Undi95/Emerald-13B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|arc:challenge|25_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-10-03T17-31-23.265550.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_23T18_27_52.311274 path: - '**/details_harness|drop|3_2023-10-23T18-27-52.311274.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-23T18-27-52.311274.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_23T18_27_52.311274 path: - '**/details_harness|gsm8k|5_2023-10-23T18-27-52.311274.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-23T18-27-52.311274.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hellaswag|10_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_10_03T17_31_23.265550 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_10_03T17_31_23.265550 path: - '**/details_harness|truthfulqa:mc|0_2023-10-03T17-31-23.265550.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-10-03T17-31-23.265550.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_23T18_27_52.311274 path: - '**/details_harness|winogrande|5_2023-10-23T18-27-52.311274.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-23T18-27-52.311274.parquet' - config_name: results data_files: - split: 2023_10_03T17_31_23.265550 path: - results_2023-10-03T17-31-23.265550.parquet - split: 2023_10_23T18_27_52.311274 path: - results_2023-10-23T18-27-52.311274.parquet - split: latest path: - results_2023-10-23T18-27-52.311274.parquet --- # Dataset Card for Evaluation run of Undi95/Emerald-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Undi95/Emerald-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Undi95/Emerald-13B](https://huggingface.co/Undi95/Emerald-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__Emerald-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T18:27:52.311274](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Emerald-13B/blob/main/results_2023-10-23T18-27-52.311274.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You can find each of them in the results and the "latest" split for each eval): ```python { "all": { "em": 0.11566694630872483, "em_stderr": 0.0032753085227622833, "f1": 0.18378460570469723, "f1_stderr": 0.003376754461365903, "acc": 0.4437006222575401, "acc_stderr": 0.010610978881102105 }, "harness|drop|3": { "em": 0.11566694630872483, "em_stderr": 0.0032753085227622833, "f1": 0.18378460570469723, "f1_stderr": 0.003376754461365903 }, "harness|gsm8k|5": { "acc": 0.1281273692191054, "acc_stderr": 0.009206398549980031 }, "harness|winogrande|5": { "acc": 0.7592738752959748, "acc_stderr": 0.012015559212224176 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
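As a complement to the loader shown in the dataset summary above, the following is a minimal sketch of how the available configurations can be enumerated and how the most recent run of a single task can be pulled via its `latest` split. It assumes only that the `datasets` library is installed; the repository name and the `harness_winogrande_5` config are the ones declared in this card.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Undi95__Emerald-13B"

# One config per evaluated task, plus the aggregated "results" config.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# The "latest" split of every config points at the most recent run,
# while the timestamped splits pin a specific run.
details = load_dataset(repo, "harness_winogrande_5", split="latest")
print(details.column_names)
print(details[0])
```

Because each configuration exposes both timestamped splits and a `latest` alias, downstream code can either pin an exact run or always follow the newest one.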
[ -0.34533271193504333, -0.6463662385940552, 0.3138691186904907, 0.29176390171051025, -0.14867764711380005, 0.16415899991989136, -0.3405763804912567, -0.11688417196273804, 0.39905545115470886, 0.5925009250640869, -0.7571313381195068, -0.9444651007652283, -0.6197909116744995, 0.213525652885437, -0.1770659238100052, 0.9810920357704163, -0.20068247616291046, -0.1641683280467987, 0.1149958148598671, -0.33421412110328674, -0.44573917984962463, -0.4081377685070038, -0.4758320450782776, -0.45332834124565125, 0.35678306221961975, 0.6702467799186707, 0.40343353152275085, 0.707612156867981, 0.6236047148704529, 0.3854507803916931, -0.10467725247144699, 0.1815117597579956, -0.40059420466423035, -0.14891503751277924, 0.20902688801288605, -0.6344377398490906, -0.7784293293952942, 0.09408225119113922, 0.7065703868865967, 0.48544833064079285, -0.26668617129325867, 0.5795753598213196, 0.15831440687179565, 0.6463666558265686, -0.43579578399658203, 0.26424527168273926, -0.40252602100372314, -0.031739965081214905, -0.39160674810409546, -0.1752372533082962, -0.05839116498827934, -0.23514272272586823, -0.2843189239501953, -0.5208570957183838, 0.12005779147148132, 0.1448795348405838, 1.0092453956604004, 0.09840928763151169, -0.2474433332681656, -0.10557253658771515, -0.34551265835762024, 0.9049844145774841, -0.8213287591934204, -0.019112272188067436, 0.5924428105354309, 0.06196490302681923, -0.30005592107772827, -0.6001324653625488, -0.32440027594566345, 0.008031257428228855, -0.1804831475019455, 0.26331448554992676, -0.15923289954662323, -0.12591002881526947, 0.42569810152053833, 0.6478620171546936, -0.6449217796325684, 0.11169002205133438, -0.5832173824310303, -0.16605430841445923, 0.9945212006568909, 0.48295870423316956, 0.12166370451450348, -0.5398104786872864, -0.32822385430336, -0.2917465567588806, -0.44616955518722534, 0.22056885063648224, 0.48152637481689453, 0.4899906814098358, -0.6102503538131714, 0.7802677154541016, -0.519174337387085, 0.3657131791114807, -0.041326675564050674, -0.09153743088245392, 0.8504038453102112, -0.4572294056415558, -0.2067539095878601, 0.08737249672412872, 1.1459953784942627, 0.5082290172576904, -0.03445521369576454, 0.13739700615406036, -0.2648641765117645, -0.07470506429672241, 0.03787907585501671, -0.8094112277030945, -0.1538230925798416, 0.5298989415168762, -0.6892902255058289, -0.4099671244621277, 0.17173922061920166, -0.9517475366592407, -0.2182433307170868, -0.280447393655777, 0.27951210737228394, -0.1844741702079773, -0.30263394117355347, -0.12697814404964447, -0.1575140506029129, 0.16834565997123718, 0.1984819769859314, -0.6024152636528015, 0.34018534421920776, 0.5304627418518066, 0.9700418710708618, -0.05240393802523613, -0.38180193305015564, -0.3252670466899872, -0.19317516684532166, -0.2175966203212738, 0.5517624020576477, -0.21991153061389923, -0.43663835525512695, -0.22679594159126282, 0.3932288885116577, -0.281450092792511, -0.5839440822601318, 0.7082045674324036, -0.2305787205696106, 0.1402956247329712, -0.18016168475151062, -0.4533572196960449, -0.14481930434703827, 0.23497654497623444, -0.6756598353385925, 1.4233509302139282, 0.31443101167678833, -0.8446897864341736, 0.048541609197854996, -0.8420234322547913, -0.08426452428102493, 0.15085473656654358, 0.008234395645558834, -0.5248252153396606, -0.08915074169635773, 0.1375664472579956, 0.5563269257545471, -0.3805939257144928, 0.022246427834033966, -0.28428009152412415, -0.38054418563842773, 0.1404184103012085, -0.1349271684885025, 1.0860888957977295, 0.2432016134262085, -0.40644556283950806, 
0.13332779705524445, -1.1217132806777954, 0.26450416445732117, 0.3034028112888336, -0.5210634469985962, -0.22571316361427307, -0.24758809804916382, 0.13038431107997894, 0.04158917814493179, 0.5620064735412598, -0.629657506942749, 0.30282682180404663, -0.022008031606674194, 0.3906087279319763, 1.0255404710769653, -0.015617391094565392, 0.2991792857646942, -0.37836962938308716, 0.5330769419670105, -0.0036146261263638735, 0.3108193576335907, 0.07795413583517075, -0.5114450454711914, -0.7185248136520386, -0.2682300806045532, 0.2629775106906891, 0.7924448847770691, -0.28909793496131897, 0.8256769776344299, -0.339093416929245, -0.7327598333358765, -0.747520387172699, 0.10446164757013321, 0.3433922827243805, 0.47845223546028137, 0.43829360604286194, -0.29942041635513306, -0.7084147930145264, -1.0354200601577759, 0.009466156363487244, -0.1409846991300583, 0.045671116560697556, 0.5215293169021606, 0.9539696574211121, -0.41995352506637573, 0.6196439266204834, -0.7003471851348877, -0.3349333703517914, -0.32524803280830383, 0.1502961367368698, 0.7917723655700684, 0.5716874599456787, 0.35065916180610657, -0.5277009606361389, -0.3623410165309906, -0.00357329985126853, -0.8285974860191345, -0.2479953169822693, -0.18832525610923767, -0.3315022885799408, 0.28930413722991943, -0.011771663092076778, -0.5079909563064575, 0.6269121766090393, 0.5008966326713562, -0.617912769317627, 0.5904075503349304, -0.14046086370944977, 0.467788428068161, -1.1850923299789429, 0.18939143419265747, 0.02118796855211258, 0.01939515210688114, -0.4082673490047455, -0.15501563251018524, 0.14517958462238312, 0.2964324951171875, -0.3097332715988159, 0.6160738468170166, -0.4377714693546295, -0.10808151215314865, 0.012541680596768856, 0.043035559356212616, -0.15440279245376587, 0.4871133267879486, -0.1769900619983673, 0.7435322403907776, 0.5529588460922241, -0.38778576254844666, 0.42576590180397034, 0.5062461495399475, -0.5220656394958496, 0.3087020814418793, -0.5177605748176575, -0.01906548999249935, 0.1431112140417099, 0.10608551651239395, -0.7789040207862854, -0.33477315306663513, 0.3122340142726898, -0.5452439785003662, 0.14172789454460144, -0.3697723150253296, -0.5999025702476501, -0.45921799540519714, -0.4989599287509918, 0.1298806220293045, 0.6154049038887024, -0.4551760256290436, 0.3799014985561371, 0.31849759817123413, 0.010152367874979973, -0.6038349866867065, -0.7735045552253723, -0.10845012217760086, -0.47431322932243347, -0.7249846458435059, 0.38680168986320496, -0.1801484376192093, -0.28932106494903564, 0.0063598849810659885, -0.06640461087226868, -0.08376777172088623, 0.23470163345336914, 0.36982792615890503, 0.5118994116783142, -0.08864626288414001, -0.37217453122138977, -0.2068749964237213, -0.17560207843780518, 0.1776089072227478, 0.21870747208595276, 0.5734630823135376, -0.19168786704540253, -0.19894999265670776, -0.3453596830368042, 0.23038001358509064, 0.42327460646629333, -0.10474219918251038, 0.8581445217132568, 0.6252269744873047, -0.22866711020469666, -0.10490092635154724, -0.37005841732025146, -0.0036764820106327534, -0.4853565990924835, 0.3634982407093048, -0.2760635018348694, -0.7171752452850342, 0.9426596760749817, 0.16195809841156006, 0.09350065886974335, 0.696456253528595, 0.5097208023071289, 0.05292358621954918, 0.909182071685791, 0.29737117886543274, -0.11520366370677948, 0.532403290271759, -0.7692250609397888, -0.04178275540471077, -1.2724846601486206, -0.4040934443473816, -0.48347875475883484, -0.4079391360282898, -0.8841418623924255, -0.24860283732414246, 0.22458066046237946, 0.32764825224876404, 
-0.5042481422424316, 0.49201494455337524, -0.6056219339370728, 0.2613241970539093, 0.7400122284889221, 0.2723709046840668, 0.1140545904636383, -0.05206725746393204, -0.12378742545843124, 0.20928092300891876, -0.47150036692619324, -0.47296082973480225, 1.4878487586975098, 0.25840631127357483, 0.7275920510292053, -0.0041567119769752026, 1.034402847290039, 0.3135145604610443, 0.16002073884010315, -0.4428263306617737, 0.6437469720840454, -0.029930610209703445, -0.5077536106109619, -0.18764656782150269, -0.6446012258529663, -1.0158782005310059, 0.22295112907886505, -0.043103016912937164, -1.0708726644515991, 0.13938336074352264, 0.03329213708639145, -0.17331260442733765, 0.34170523285865784, -0.5923854112625122, 0.8561223745346069, -0.3092788755893707, -0.4934201240539551, -0.054076358675956726, -0.7724435925483704, 0.357131689786911, 0.03926865756511688, 0.4255081117153168, -0.28561559319496155, 0.08619176596403122, 1.1275087594985962, -0.6759248971939087, 0.6867775321006775, -0.1374751776456833, 0.14863306283950806, 0.37214159965515137, -0.22870677709579468, 0.6176003217697144, 0.0035688516218215227, -0.19155216217041016, 0.4709143042564392, -0.17867054045200348, -0.3048757314682007, -0.23466025292873383, 0.8886674642562866, -0.9520962238311768, -0.36675745248794556, -0.5114513635635376, -0.557210385799408, 0.24196475744247437, 0.33322736620903015, 0.39234018325805664, 0.3988150954246521, 0.0929151326417923, 0.2732640504837036, 0.37791308760643005, -0.16408008337020874, 0.5599158406257629, 0.48336270451545715, -0.14174720644950867, -0.7423250675201416, 0.6589451432228088, 0.26039716601371765, 0.03460739180445671, 0.19095614552497864, 0.0511745847761631, -0.5372395515441895, -0.4891189932823181, -0.43238797783851624, 0.24752077460289001, -0.5634990930557251, -0.36331847310066223, -0.4236826002597809, -0.22366060316562653, -0.38944119215011597, -0.020736537873744965, -0.4354308843612671, -0.5448505878448486, -0.4801540970802307, -0.2394663393497467, 0.7318485379219055, 0.610203206539154, -0.5649831295013428, 0.3238680362701416, -0.8750925064086914, 0.15372499823570251, -0.1635582447052002, 0.38864970207214355, -0.07692836970090866, -0.613732635974884, -0.35488054156303406, 0.18302196264266968, -0.4739243686199188, -0.894100546836853, 0.5965438485145569, -0.10025837272405624, 0.6984312534332275, 0.14847329258918762, 0.14469267427921295, 0.8087255358695984, -0.32946133613586426, 0.994564950466156, 0.011150785721838474, -0.706657350063324, 0.8003608584403992, -0.3338550925254822, 0.07017093151807785, 0.5457791090011597, 0.19189773499965668, -0.5134971737861633, -0.2126084715127945, -1.0310840606689453, -1.2516590356826782, 1.1087599992752075, 0.5763688087463379, -0.3101955056190491, 0.11830704659223557, 0.25468748807907104, -0.17217935621738434, 0.16523833572864532, -0.7060977220535278, -0.8439663052558899, -0.10261180996894836, -0.31826451420783997, -0.025967834517359734, -0.012802825309336185, -0.3953765034675598, -0.31158551573753357, 0.9422720074653625, 0.05048391595482826, 0.5049312710762024, 0.15176165103912354, -0.06916414946317673, -0.15758833289146423, 0.33725428581237793, 0.323693186044693, 0.6758144497871399, -0.49811747670173645, -0.12889912724494934, 0.26676446199417114, -0.5917355418205261, 0.00472272327169776, 0.37252727150917053, -0.0670609101653099, -0.21233844757080078, 0.5842460989952087, 0.9021598100662231, 0.014932640828192234, -0.4078664481639862, 0.43682360649108887, -0.006572729907929897, -0.24165156483650208, -0.5075163841247559, 0.12789541482925415, -0.05232798308134079, 
0.403521865606308, 0.47167059779167175, -0.10920101404190063, 0.08153367042541504, -0.4175831079483032, 0.2076822966337204, 0.30669111013412476, -0.11341158300638199, -0.31903699040412903, 0.5860373377799988, 0.020966913551092148, -0.33058854937553406, 0.8745177984237671, -0.06202301010489464, -0.5764560103416443, 1.082973599433899, 0.3429752588272095, 0.864374041557312, -0.1249375119805336, 0.09136321395635605, 0.624515175819397, 0.35289451479911804, -0.11422336846590042, 0.7011183500289917, 0.10548833012580872, -0.6296587586402893, -0.20963934063911438, -0.7054319381713867, -0.16828395426273346, 0.40649354457855225, -1.0834254026412964, 0.37164121866226196, -0.1257905215024948, -0.16957038640975952, -0.16556234657764435, 0.40076640248298645, -0.804758608341217, 0.1780593991279602, 0.06601057946681976, 0.964590847492218, -1.0421603918075562, 0.509421706199646, 0.8728553056716919, -0.5490169525146484, -0.9776692390441895, -0.37244942784309387, 0.037524107843637466, -0.9303151369094849, 0.466322124004364, 0.2571292519569397, 0.48512405157089233, -0.08603174984455109, -0.6777980923652649, -1.0527422428131104, 1.6025279760360718, 0.05875709280371666, -0.6154411435127258, 0.23922857642173767, 0.1919609010219574, 0.2904771864414215, -0.2723894417285919, 0.7187421321868896, 0.7159164547920227, 0.6730388402938843, -0.11873505264520645, -0.929969310760498, 0.2947016656398773, -0.4489780366420746, -0.12112051993608475, 0.370249480009079, -1.0159966945648193, 1.0104268789291382, -0.1498456746339798, -0.005512124393135309, 0.07711036503314972, 0.3570447266101837, 0.5946983695030212, 0.258504182100296, 0.37611445784568787, 0.8456845283508301, 0.7685158252716064, -0.36718034744262695, 1.0002280473709106, -0.32228589057922363, 0.8297430872917175, 0.9969606399536133, 0.010435537435114384, 0.7433585524559021, 0.29706254601478577, -0.48916512727737427, 0.5896625518798828, 0.8739785552024841, -0.36263036727905273, 0.5025099515914917, 0.1065700575709343, -0.024630626663565636, -0.19276872277259827, 0.05982409417629242, -0.4727397859096527, 0.2938489019870758, 0.16951343417167664, -0.5538532733917236, -0.22453881800174713, -0.3673793077468872, 0.13841314613819122, -0.28416144847869873, -0.4248802363872528, 0.6077651977539062, -0.0015544568886980414, -0.4197733700275421, 0.571973443031311, -0.06815837323665619, 0.6555306315422058, -0.6698148846626282, -0.1738586127758026, -0.3493437170982361, 0.3173369765281677, -0.5970902442932129, -1.0692967176437378, 0.05917670577764511, 0.1405515968799591, -0.2542884945869446, -0.16847728192806244, 0.5546215772628784, -0.22548475861549377, -0.5959423184394836, 0.4070906341075897, 0.34230509400367737, 0.3321380913257599, 0.1004425585269928, -0.9273014664649963, 0.18526895344257355, 0.34128308296203613, -0.7363581657409668, 0.4531550109386444, 0.476004034280777, 0.02491023950278759, 0.6073654890060425, 0.6947161555290222, 0.128764808177948, 0.15251576900482178, -0.07240281254053116, 1.0292757749557495, -0.8847277164459229, -0.3827652633190155, -0.7833119630813599, 0.965125322341919, -0.2758956253528595, -0.6471463441848755, 0.8743728995323181, 1.0103178024291992, 0.7344331741333008, 0.16989146173000336, 0.8617110848426819, -0.486934632062912, 0.5377836227416992, -0.428658664226532, 0.7737950086593628, -0.7055339217185974, 0.3310229182243347, -0.18513382971286774, -0.8478116393089294, 0.032496582716703415, 0.7618167400360107, -0.13190937042236328, -0.04495377466082573, 0.6619946360588074, 0.9299248456954956, 0.04860344156622887, 0.14591200649738312, -0.09277859330177307, 
0.4345526397228241, 0.2098449170589447, 0.6159920692443848, 0.7644937038421631, -0.7099547982215881, 0.40163129568099976, -0.7304421663284302, -0.4684615135192871, -0.05782785639166832, -0.7790395021438599, -0.796356201171875, -0.44741174578666687, -0.4011807143688202, -0.5424291491508484, -0.05577949061989784, 1.0375568866729736, 0.4330046474933624, -0.845318615436554, -0.43893343210220337, -0.11844984441995621, 0.1293141394853592, -0.13727134466171265, -0.3467625677585602, 0.6112053394317627, -0.0900038331747055, -0.7475961446762085, 0.3557053804397583, -0.20962852239608765, -0.1279970109462738, 0.09457594156265259, -0.19279474020004272, -0.25091737508773804, -0.27598002552986145, 0.34678781032562256, 0.16954761743545532, -0.5824604034423828, -0.275164932012558, -0.12399360537528992, 0.11091723293066025, 0.2712564766407013, 0.25889813899993896, -0.5995859503746033, -0.05828109383583069, 0.5070080161094666, 0.1545487344264984, 0.5807973742485046, 0.09495911002159119, 0.2320784330368042, -0.6259881258010864, -0.08524429798126221, -0.031679991632699966, 0.5949159860610962, 0.14684374630451202, -0.5270928144454956, 1.0061184167861938, 0.31588244438171387, -0.8330931067466736, -0.9849092364311218, -0.26460349559783936, -1.1458944082260132, 0.004394247196614742, 1.4190455675125122, -0.18784768879413605, -0.39540573954582214, 0.15618811547756195, -0.1494310051202774, 0.19518575072288513, -0.8364043831825256, 0.5107670426368713, 0.6455690264701843, -0.4150334596633911, 0.006695081014186144, -0.5810872912406921, 0.2883053719997406, 0.05332484096288681, -0.9498953223228455, 0.06599605828523636, 0.36933407187461853, 0.4838734269142151, 0.22053179144859314, 0.6232194304466248, -0.009082573466002941, -0.07812830060720444, -0.024911224842071533, 0.3319639563560486, -0.12021354585886002, -0.08086555451154709, -0.2960872948169708, -0.03230859711766243, -0.34622398018836975, -0.4530669152736664 ]
open-llm-leaderboard/details_Yukang__Llama-2-7b-longlora-100k-ft
open-llm-leaderboard
2023-12-03T15:58:35Z
201
0
[ "region:us" ]
null
2023-10-03T23:44:56Z
--- pretty_name: Evaluation run of Yukang/Llama-2-7b-longlora-100k-ft dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Yukang/Llama-2-7b-longlora-100k-ft](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configurations, each one corresponding to one of the\ \ evaluated tasks.\n\nThe dataset has been created from 3 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run. The \"train\" split always points to the latest results.\n\ \nAn additional configuration \"results\" stores all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Yukang__Llama-2-7b-longlora-100k-ft\"\ ,\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese\ \ are the [latest results from run 2023-12-03T15:58:28.063022](https://huggingface.co/datasets/open-llm-leaderboard/details_Yukang__Llama-2-7b-longlora-100k-ft/blob/main/results_2023-12-03T15-58-28.063022.json)\ \ (note that there might be results for other tasks in the repo if successive evals didn't\ \ cover the same tasks. You can find each of them in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.0,\n \"\ acc_stderr\": 0.0\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \ \ \"acc_stderr\": 0.0\n }\n}\n```" repo_url: https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|arc:challenge|25_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-10-03T23-44-33.008703.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_24T18_35_01.826306 path: - '**/details_harness|drop|3_2023-10-24T18-35-01.826306.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-24T18-35-01.826306.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_24T18_35_01.826306 path: - '**/details_harness|gsm8k|5_2023-10-24T18-35-01.826306.parquet' - split: 2023_12_03T15_58_28.063022 path: - '**/details_harness|gsm8k|5_2023-12-03T15-58-28.063022.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-12-03T15-58-28.063022.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hellaswag|10_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T23-44-33.008703.parquet' - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T23-44-33.008703.parquet' - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T23-44-33.008703.parquet' - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-03T23-44-33.008703.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T23-44-33.008703.parquet' - config_name: 
harness_hendrycksTest_anatomy_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - 
'**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 
2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-management|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 
2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|hendrycksTest-virology|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_10_03T23_44_33.008703 path: - 
'**/details_harness|hendrycksTest-world_religions|5_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-03T23-44-33.008703.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_10_03T23_44_33.008703 path: - '**/details_harness|truthfulqa:mc|0_2023-10-03T23-44-33.008703.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-10-03T23-44-33.008703.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_24T18_35_01.826306 path: - '**/details_harness|winogrande|5_2023-10-24T18-35-01.826306.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-24T18-35-01.826306.parquet' - config_name: results data_files: - split: 2023_10_03T23_44_33.008703 path: - results_2023-10-03T23-44-33.008703.parquet - split: 2023_10_24T18_35_01.826306 path: - results_2023-10-24T18-35-01.826306.parquet - split: 2023_12_03T15_58_28.063022 path: - results_2023-12-03T15-58-28.063022.parquet - split: latest path: - results_2023-12-03T15-58-28.063022.parquet
---

# Dataset Card for Evaluation run of Yukang/Llama-2-7b-longlora-100k-ft

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Yukang/Llama-2-7b-longlora-100k-ft](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). A minimal sketch of loading this "results" configuration is included at the end of this card.

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Yukang__Llama-2-7b-longlora-100k-ft",
	"harness_gsm8k_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-12-03T15:58:28.063022](https://huggingface.co/datasets/open-llm-leaderboard/details_Yukang__Llama-2-7b-longlora-100k-ft/blob/main/results_2023-12-03T15-58-28.063022.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?
[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
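As referenced in the Dataset Summary above, here is a minimal sketch of pulling the aggregated "results" configuration of this dataset and reading its latest split. It assumes only the `datasets` library and the configuration and split names listed in this card's YAML header, and is an illustration rather than the official leaderboard tooling:

```python
from datasets import load_dataset

# The "results" configuration aggregates the metrics of every run; its "latest"
# split points at the most recent evaluation (here 2023-12-03T15:58:28.063022).
results = load_dataset(
    "open-llm-leaderboard/details_Yukang__Llama-2-7b-longlora-100k-ft",
    "results",
    split="latest",
)

# Convert to pandas to inspect the aggregated metrics of the latest run.
print(results.to_pandas().head())
```

Timestamped splits such as `2023_10_03T23_44_33.008703` can be requested the same way to compare successive runs.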
[ -0.3673495054244995, -0.6710866093635559, 0.25576546788215637, 0.3491402566432953, -0.37397366762161255, 0.026311947032809258, -0.4280543923377991, -0.3513733148574829, 0.48913851380348206, 0.685143768787384, -0.636682391166687, -0.9676998257637024, -0.6675434112548828, 0.3061441481113434, -0.1890251487493515, 1.29128098487854, -0.2704419195652008, -0.234047070145607, 0.019491473212838173, -0.3893927335739136, -0.4982469081878662, -0.35582461953163147, -0.34954679012298584, -0.4290674328804016, 0.5520229935646057, 0.613287091255188, 0.3862498998641968, 0.7927268147468567, 0.6620662808418274, 0.3415549695491791, -0.14295601844787598, 0.153676375746727, -0.5763295888900757, -0.10109271854162216, 0.1396530717611313, -0.6830143332481384, -0.822058379650116, 0.15598636865615845, 0.7676982879638672, 0.39371970295906067, -0.08372468501329422, 0.7420952916145325, 0.11002662777900696, 0.6374547481536865, -0.4750288128852844, 0.5749840140342712, -0.23997950553894043, 0.0022782979067415, -0.4949326813220978, -0.18104253709316254, 0.011505022644996643, -0.3610753118991852, -0.14095380902290344, -0.5160535573959351, 0.04190562665462494, -0.0270828939974308, 0.9506015181541443, 0.29391250014305115, -0.268189936876297, -0.18266499042510986, -0.2213757038116455, 0.8212743997573853, -0.9307823777198792, -0.12257856875658035, 0.6798959374427795, 0.1630709171295166, -0.34321266412734985, -0.6349324584007263, -0.40481701493263245, -0.04929778352379799, -0.213998943567276, 0.13367459177970886, 0.07546397298574448, -0.125543475151062, 0.4694240987300873, 0.6435000896453857, -0.6205856204032898, 0.11483319103717804, -0.5784730315208435, -0.05234670639038086, 0.9782053828239441, 0.3069274425506592, -0.06445672363042831, -0.5553861260414124, -0.5499700903892517, -0.36221593618392944, -0.44643110036849976, 0.17512884736061096, 0.42605820298194885, 0.5397733449935913, -0.6547070145606995, 0.8244721293449402, -0.43458160758018494, 0.4816618263721466, -0.09491590410470963, -0.44055286049842834, 0.8591763377189636, -0.560755729675293, -0.28963059186935425, 0.07147204875946045, 0.9711615443229675, 0.442541241645813, -0.0776384100317955, 0.2708855867385864, -0.27820149064064026, -0.01593822054564953, -0.10198239982128143, -0.7837485671043396, -0.060981735587120056, 0.4050338566303253, -0.5288724303245544, -0.39782199263572693, 0.15360207855701447, -1.0258333683013916, -0.2893441319465637, -0.3592491149902344, 0.2201801836490631, -0.017966514453291893, -0.3179508447647095, -0.08367184549570084, -0.008707919158041477, 0.32381102442741394, 0.15013937652111053, -0.48451632261276245, 0.3116595149040222, 0.706628680229187, 0.8787577152252197, -0.2631048560142517, -0.47858789563179016, -0.45040708780288696, -0.22982364892959595, -0.1911490261554718, 0.4308570623397827, -0.10070014744997025, -0.3234591484069824, -0.09719132632017136, 0.30401214957237244, -0.19397395849227905, -0.6286631226539612, 0.6970728039741516, -0.3506753742694855, 0.175856813788414, -0.3798971474170685, -0.47299060225486755, -0.2138318121433258, 0.363482266664505, -0.7434775829315186, 1.3574568033218384, 0.4272165298461914, -0.8720360398292542, 0.217299684882164, -0.8644492626190186, -0.13047517836093903, 0.05757023021578789, 0.1318691074848175, -0.5997226238250732, -0.2144252061843872, 0.07309932261705399, 0.6255337595939636, -0.34971514344215393, 0.1728101670742035, -0.31145837903022766, -0.46397846937179565, 0.08937907963991165, -0.05127042159438133, 1.0357400178909302, 0.21349211037158966, -0.3928621709346771, 0.23639388382434845, 
-1.0005360841751099, 0.034035079181194305, 0.38493239879608154, -0.6384114623069763, -0.1551954746246338, -0.25306546688079834, 0.17148706316947937, 0.0787895992398262, 0.6125084161758423, -0.47270968556404114, 0.36375924944877625, -0.05089546740055084, 0.15891654789447784, 1.0158932209014893, -0.02907007560133934, 0.10555736720561981, -0.3994339108467102, 0.5497239828109741, 0.06588836759328842, 0.2205563336610794, 0.14237646758556366, -0.5484668612480164, -0.9132304191589355, -0.16503429412841797, 0.11742851883172989, 0.6047385334968567, -0.4930593967437744, 0.8186867833137512, -0.4755711853504181, -0.6483717560768127, -0.6344233155250549, 0.33501583337783813, 0.42003777623176575, 0.5224717855453491, 0.41501301527023315, -0.3379775285720825, -0.7308657765388489, -0.9797899127006531, 0.11138755828142166, -0.2326490581035614, 0.18553268909454346, 0.38797590136528015, 1.0699701309204102, -0.35711658000946045, 0.7029727697372437, -0.7421432137489319, -0.41044482588768005, -0.32931557297706604, 0.0015538361622020602, 0.8469105958938599, 0.4380064010620117, 0.537185549736023, -0.5573191046714783, -0.37288448214530945, -0.13953223824501038, -0.8077605366706848, -0.23744799196720123, -0.08679649978876114, -0.3815148174762726, 0.3111231327056885, -0.14371050894260406, -0.4898187816143036, 0.5600632429122925, 0.5862607359886169, -0.5700266361236572, 0.7126707434654236, 0.0622880645096302, 0.3212374448776245, -1.2143737077713013, 0.18360845744609833, 0.12381160259246826, -0.05395030230283737, -0.3681562840938568, 0.06969785690307617, -0.051875125616788864, 0.37375932931900024, -0.44290363788604736, 0.5314991474151611, -0.3584752678871155, -0.21534565091133118, -0.11437569558620453, 0.05504221469163895, -0.10291310399770737, 0.6885679960250854, -0.27657008171081543, 0.7251812219619751, 0.41164788603782654, -0.392724871635437, 0.5785883069038391, 0.46353787183761597, -0.5989390015602112, 0.2906450629234314, -0.5205381512641907, 0.1694118082523346, 0.1752663552761078, 0.34997671842575073, -0.7432992458343506, -0.479867547750473, 0.5043206810951233, -0.29472512006759644, 0.16898725926876068, -0.2672809064388275, -0.6703051924705505, -0.47116997838020325, -0.5816546082496643, 0.3521369397640228, 0.49461716413497925, -0.6069642901420593, 0.11337015777826309, 0.3334754705429077, 0.08415764570236206, -0.6517459750175476, -0.6840243935585022, -0.10907119512557983, -0.5268489122390747, -0.5673290491104126, 0.19177579879760742, -0.25752174854278564, -0.23829926550388336, -0.1307094544172287, 0.04373202845454216, -0.0005712250713258982, 0.12403184920549393, 0.2947624623775482, 0.47078943252563477, -0.19926798343658447, -0.2670886218547821, -0.3056216537952423, -0.11646412312984467, 0.1918230801820755, 0.2519686818122864, 0.6076456308364868, -0.231832355260849, -0.2743573784828186, -0.2915246784687042, 0.09005983918905258, 0.38337060809135437, -0.13241644203662872, 0.752518355846405, 0.7948853969573975, -0.2400050163269043, -0.04103478416800499, -0.4759828448295593, 0.10882982611656189, -0.4381740093231201, 0.4234531819820404, -0.1924995332956314, -0.6900932192802429, 0.7784838676452637, 0.25633004307746887, 0.17972010374069214, 0.7076820135116577, 0.5779933929443359, 0.18674826622009277, 0.5987507700920105, 0.3562808632850647, -0.3254687488079071, 0.515823245048523, -0.7562807202339172, -0.17789362370967865, -1.122400164604187, -0.3822658061981201, -0.4250620901584625, -0.4558490514755249, -0.8532008528709412, -0.4818286597728729, 0.2945670485496521, 0.20030350983142853, -0.45412373542785645, 
0.48392120003700256, -0.5630320310592651, 0.24532125890254974, 0.5750682353973389, 0.2592519521713257, 0.13540874421596527, -0.07988645136356354, 0.12643659114837646, 0.2578521966934204, -0.412264883518219, -0.41812044382095337, 1.3671317100524902, 0.42355918884277344, 0.6082247495651245, -0.0007406123331747949, 1.0271590948104858, 0.2647486925125122, 0.33895137906074524, -0.5986879467964172, 0.6941074728965759, 0.03909855708479881, -0.44661712646484375, -0.19760186970233917, -0.5191150903701782, -0.9696558117866516, 0.18191391229629517, -0.017835119739174843, -0.8453677296638489, 0.05084693059325218, -0.11607322841882706, -0.06981515884399414, 0.28636935353279114, -0.4524616003036499, 0.7993165254592896, -0.35493138432502747, -0.41682371497154236, -0.027240481227636337, -0.8528912663459778, 0.5363053679466248, -0.06965645402669907, 0.22277118265628815, -0.3264848291873932, 0.0254347063601017, 1.2537109851837158, -0.5695271492004395, 0.9058634042739868, -0.24878555536270142, -0.014089112170040607, 0.4545412063598633, -0.4280705749988556, 0.5593461394309998, -0.05602199211716652, -0.25006791949272156, 0.4487992525100708, -0.25148022174835205, -0.3355275094509125, -0.29865512251853943, 0.8619886040687561, -0.9547303915023804, -0.4117021858692169, -0.560949444770813, -0.574589192867279, 0.24066175520420074, 0.25061362981796265, 0.21963970363140106, 0.03494774177670479, 0.036244794726371765, 0.2241005003452301, 0.2617270052433014, -0.15372087061405182, 0.5132961869239807, 0.30315136909484863, -0.30768829584121704, -0.725300133228302, 0.6394152641296387, 0.237882599234581, 0.08153387904167175, 0.08944141864776611, 0.1444321870803833, -0.47002938389778137, -0.476631224155426, -0.3677976727485657, 0.3604419529438019, -0.6903226375579834, -0.3934447169303894, -0.2654576003551483, -0.28990864753723145, -0.37565353512763977, 0.15440338850021362, -0.32872292399406433, -0.35894322395324707, -0.5548940896987915, -0.3277948796749115, 0.687448263168335, 0.6754423379898071, -0.26405224204063416, 0.37689879536628723, -0.6986804604530334, 0.3005565106868744, -0.10006659477949142, 0.46348240971565247, -0.04644050821661949, -0.5403538346290588, -0.5315713286399841, 0.18573570251464844, -0.40263253450393677, -0.9000556468963623, 0.6153128743171692, -0.015332227572798729, 0.5896070003509521, 0.23193968832492828, 0.06261412799358368, 0.9199021458625793, -0.08527575433254242, 1.0679038763046265, 0.03519028052687645, -0.60353684425354, 0.7231048345565796, -0.3329012095928192, 0.11892306804656982, 0.5091255903244019, 0.14418715238571167, -0.39547842741012573, 0.005066311452537775, -0.7069886922836304, -1.1783136129379272, 0.9333896636962891, 0.42189010977745056, -0.3212733268737793, 0.12068946659564972, 0.3689747452735901, 0.0806301087141037, 0.159810870885849, -0.8572717905044556, -0.667161226272583, -0.12996841967105865, -0.09255912154912949, -0.2558937072753906, -0.19584453105926514, -0.4904678761959076, -0.40750542283058167, 0.8865097761154175, -0.11636251956224442, 0.25607484579086304, 0.12138688564300537, -0.03387675806879997, -0.2093416452407837, 0.24629735946655273, 0.41908013820648193, 0.6614475250244141, -0.44481125473976135, -0.15791189670562744, 0.3300257623195648, -0.5284549593925476, 0.036247409880161285, 0.2598817050457001, 0.10500569641590118, -0.23435969650745392, 0.7486879229545593, 0.9663065671920776, 0.06215250864624977, -0.3855249583721161, 0.4099052846431732, 0.16284003853797913, -0.14383237063884735, -0.5180022716522217, 0.1830318719148636, -0.09293012320995331, 0.36301735043525696, 
0.3457482159137726, -0.21467795968055725, -0.030918415635824203, -0.3974725008010864, 0.1861652284860611, 0.13333439826965332, 0.012110071256756783, -0.31110259890556335, 0.477761834859848, -0.052267495542764664, -0.22636379301548004, 0.7834079265594482, -0.0889284759759903, -0.5200968384742737, 1.0375556945800781, 0.42296284437179565, 0.8919346332550049, -0.022583896294236183, 0.08386720716953278, 0.5315823554992676, 0.09786570817232132, -0.19053100049495697, 0.6187230944633484, 0.05291006714105606, -0.5019900798797607, -0.26300036907196045, -0.8790877461433411, -0.09961622208356857, 0.4124790132045746, -0.975018322467804, 0.3640638291835785, -0.20348334312438965, -0.2894951105117798, -0.2703024744987488, 0.610090434551239, -0.8677489161491394, 0.07431232184171677, 0.09273375570774078, 0.8248375654220581, -1.0470819473266602, 0.6112760305404663, 0.7283095717430115, -0.5401474833488464, -0.8743939995765686, -0.35621169209480286, 0.0679626390337944, -0.8777536749839783, 0.543976902961731, 0.2992110550403595, 0.4143182933330536, -0.4169948995113373, -0.7433544993400574, -1.051579236984253, 1.6358622312545776, 0.16982890665531158, -0.49258315563201904, 0.22770117223262787, 0.3651169240474701, 0.5169085264205933, -0.37707212567329407, 0.4305424690246582, 0.7888709902763367, 0.8365774750709534, 0.0387912280857563, -1.013502597808838, 0.2547035813331604, -0.48608899116516113, -0.1086544618010521, 0.1176568865776062, -1.0176324844360352, 1.0283747911453247, -0.219663605093956, 0.048506490886211395, 0.15739376842975616, 0.6097961068153381, 0.7208924293518066, 0.4707934558391571, 0.5057989954948425, 0.7707816958427429, 0.6690484285354614, -0.20546497404575348, 1.0283803939819336, -0.10457944124937057, 0.710620641708374, 1.1422663927078247, -0.06544341146945953, 0.775550365447998, 0.3922610580921173, -0.44795021414756775, 0.5233520269393921, 0.9394488334655762, -0.37857940793037415, 0.39306551218032837, 0.2932495176792145, 0.0210960004478693, 0.0003171912394464016, -0.11383013427257538, -0.5423219203948975, 0.5946576595306396, 0.1361052542924881, -0.5855178833007812, -0.1888551115989685, -0.45123687386512756, 0.23005634546279907, -0.3759196996688843, -0.2702957093715668, 0.5466622710227966, 0.10889613628387451, -0.6144269704818726, 0.7987865805625916, -0.10399685055017471, 0.7229166626930237, -0.6698419451713562, -0.24131548404693604, -0.2540895640850067, 0.214690163731575, -0.5768969058990479, -0.9174739718437195, 0.0635082945227623, 0.130889892578125, -0.17541582882404327, -0.09995000809431076, 0.4962037205696106, -0.33353471755981445, -0.588970422744751, 0.4533759653568268, 0.4058418273925781, 0.42021670937538147, 0.2226741909980774, -0.8595933318138123, 0.22292302548885345, 0.22202454507350922, -0.7751262784004211, 0.49278879165649414, 0.21053825318813324, -0.07620079070329666, 0.5846124887466431, 0.7765097618103027, 0.17762650549411774, 0.06742215901613235, 0.046655625104904175, 1.0569922924041748, -0.7548275589942932, -0.3848951458930969, -0.7989025712013245, 0.792083203792572, -0.2051876038312912, -0.6642783284187317, 0.7983142733573914, 0.913446843624115, 0.7354931831359863, 0.16161254048347473, 0.6692390441894531, -0.23995614051818848, 0.4131608009338379, -0.4278763234615326, 0.8538119196891785, -0.6368627548217773, 0.40388211607933044, -0.2887343764305115, -1.0212205648422241, -0.03248807042837143, 0.5972025394439697, -0.13212205469608307, 0.043804023414850235, 0.5341019034385681, 0.8062499761581421, 0.020360345020890236, 0.06302034109830856, -0.10190171003341675, 0.47918954491615295, 
0.38219404220581055, 0.6528414487838745, 0.5711427927017212, -0.6229800581932068, 0.27501362562179565, -0.7228235006332397, -0.36184993386268616, -0.14857684075832367, -0.8342195749282837, -0.8316153287887573, -0.4842873811721802, -0.3063719868659973, -0.5216290354728699, -0.0543365515768528, 1.132802128791809, 0.4152699112892151, -0.7709404826164246, -0.5796745419502258, 0.22783882915973663, 0.20577096939086914, -0.1578429937362671, -0.30662330985069275, 0.7344571352005005, 0.00484843272715807, -0.7157944440841675, 0.389203280210495, -0.24436162412166595, -0.06801435351371765, -0.14374516904354095, -0.23208558559417725, -0.3452906310558319, -0.39863866567611694, 0.4754292070865631, 0.19267509877681732, -0.6654534935951233, -0.344858855009079, -0.23310261964797974, 0.011514021083712578, 0.2978046238422394, 0.15518856048583984, -0.674058198928833, 0.04870016872882843, 0.6319300532341003, 0.15786290168762207, 0.6221673488616943, 0.02902623638510704, 0.1639510691165924, -0.657931387424469, -0.008424964733421803, 0.034601859748363495, 0.6061450839042664, 0.1973758190870285, -0.4693377912044525, 1.0768429040908813, 0.25704994797706604, -0.710860013961792, -0.8964459300041199, -0.19078825414180756, -1.2825678586959839, 0.061759065836668015, 1.4774785041809082, -0.35135677456855774, -0.4229452311992645, 0.23148323595523834, -0.14175289869308472, 0.45213156938552856, -0.6589819192886353, 0.636172354221344, 0.6658326983451843, -0.3286241888999939, 0.06039600074291229, -0.5257983803749084, 0.3293956220149994, 0.02799844928085804, -1.0400426387786865, 0.020644258707761765, 0.4121783375740051, 0.44025900959968567, 0.22302259504795074, 0.5913629531860352, -0.04971548169851303, -0.0940108671784401, -0.15444813668727875, 0.08947770297527313, -0.20638710260391235, -0.11901967972517014, -0.26608237624168396, 0.08978911489248276, -0.29481011629104614, -0.393667072057724 ]
open-llm-leaderboard/details_HWERI__pythia-70m-deduped-cleansharegpt-en
open-llm-leaderboard
2023-10-24T19:44:30Z
201
0
[ "region:us" ]
null
2023-10-04T00:34:54Z
--- pretty_name: Evaluation run of HWERI/pythia-70m-deduped-cleansharegpt-en dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [HWERI/pythia-70m-deduped-cleansharegpt-en](https://huggingface.co/HWERI/pythia-70m-deduped-cleansharegpt-en)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_HWERI__pythia-70m-deduped-cleansharegpt-en\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-24T19:44:16.405548](https://huggingface.co/datasets/open-llm-leaderboard/details_HWERI__pythia-70m-deduped-cleansharegpt-en/blob/main/results_2023-10-24T19-44-16.405548.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.008179530201342282,\n\ \ \"em_stderr\": 0.0009224022743034306,\n \"f1\": 0.03149853187919465,\n\ \ \"f1_stderr\": 0.0013037868918641129,\n \"acc\": 0.2505919494869771,\n\ \ \"acc_stderr\": 0.007026223145264506\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.008179530201342282,\n \"em_stderr\": 0.0009224022743034306,\n\ \ \"f1\": 0.03149853187919465,\n \"f1_stderr\": 0.0013037868918641129\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\"\ : 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5011838989739542,\n\ \ \"acc_stderr\": 0.014052446290529012\n }\n}\n```" repo_url: https://huggingface.co/HWERI/pythia-70m-deduped-cleansharegpt-en leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|arc:challenge|25_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-10-04T00-34-36.927463.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_24T19_44_16.405548 path: - '**/details_harness|drop|3_2023-10-24T19-44-16.405548.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-24T19-44-16.405548.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_24T19_44_16.405548 path: - '**/details_harness|gsm8k|5_2023-10-24T19-44-16.405548.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-24T19-44-16.405548.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hellaswag|10_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_5 data_files: - 
split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T00-34-36.927463.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T00-34-36.927463.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-04T00-34-36.927463.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T00-34-36.927463.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_10_04T00_34_36.927463 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-management|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-virology|5_2023-10-04T00-34-36.927463.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T00-34-36.927463.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_10_04T00_34_36.927463 path: - '**/details_harness|truthfulqa:mc|0_2023-10-04T00-34-36.927463.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-10-04T00-34-36.927463.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_24T19_44_16.405548 path: - '**/details_harness|winogrande|5_2023-10-24T19-44-16.405548.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-24T19-44-16.405548.parquet' - config_name: results data_files: - split: 2023_10_04T00_34_36.927463 path: - results_2023-10-04T00-34-36.927463.parquet - split: 2023_10_24T19_44_16.405548 path: - results_2023-10-24T19-44-16.405548.parquet - split: latest path: - results_2023-10-24T19-44-16.405548.parquet --- # Dataset Card for Evaluation run of HWERI/pythia-70m-deduped-cleansharegpt-en ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/HWERI/pythia-70m-deduped-cleansharegpt-en - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [HWERI/pythia-70m-deduped-cleansharegpt-en](https://huggingface.co/HWERI/pythia-70m-deduped-cleansharegpt-en) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_HWERI__pythia-70m-deduped-cleansharegpt-en", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T19:44:16.405548](https://huggingface.co/datasets/open-llm-leaderboard/details_HWERI__pythia-70m-deduped-cleansharegpt-en/blob/main/results_2023-10-24T19-44-16.405548.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.008179530201342282, "em_stderr": 0.0009224022743034306, "f1": 0.03149853187919465, "f1_stderr": 0.0013037868918641129, "acc": 0.2505919494869771, "acc_stderr": 0.007026223145264506 }, "harness|drop|3": { "em": 0.008179530201342282, "em_stderr": 0.0009224022743034306, "f1": 0.03149853187919465, "f1_stderr": 0.0013037868918641129 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.5011838989739542, "acc_stderr": 0.014052446290529012 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
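Because each run is stored both under its own timestamped split and under the "latest" alias, the aggregated "results" config can be used to compare the two runs of this model side by side. The sketch below is a minimal example, assuming only the repository id and the split names that appear in the configuration list above; the column layout of the results parquet is inspected at runtime rather than assumed.

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_HWERI__pythia-70m-deduped-cleansharegpt-en"

# "latest" always mirrors the most recent run (2023-10-24 here),
# while each run also keeps its own timestamped split.
latest = load_dataset(repo, "results", split="latest")
first_run = load_dataset(repo, "results", split="2023_10_04T00_34_36.927463")

# Inspect the schema before relying on any particular column name.
print(latest.column_names)
print(latest[0])
```

The same pattern applies to any of the per-task configs listed in the front matter; only the config name changes.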
[ -0.3415045142173767, -0.6597846150398254, 0.1887601613998413, 0.12727180123329163, -0.16275346279144287, 0.11511514335870743, -0.3949447572231293, -0.15345966815948486, 0.3488658666610718, 0.48451465368270874, -0.6654649972915649, -0.9131340384483337, -0.6484573483467102, 0.16349643468856812, -0.11716854572296143, 1.0536843538284302, -0.280695378780365, -0.2328580766916275, 0.12136687338352203, -0.24055437743663788, -0.31186866760253906, -0.4631008803844452, -0.45588135719299316, -0.4953010082244873, 0.41228187084198, 0.6920015811920166, 0.45351889729499817, 0.6927326321601868, 0.6731775403022766, 0.3865881860256195, -0.16392584145069122, 0.1500183790922165, -0.4164714813232422, -0.06365316361188889, 0.1377677172422409, -0.5432602167129517, -0.782576858997345, 0.1847074180841446, 0.7090060710906982, 0.4663969874382019, -0.15074007213115692, 0.6745526194572449, 0.1337532252073288, 0.6005566120147705, -0.5906786918640137, 0.34890562295913696, -0.3212490975856781, -0.03527971729636192, -0.28741219639778137, -0.0870581567287445, -0.027311382815241814, -0.4015454053878784, -0.09919591248035431, -0.513325035572052, 0.1794339120388031, 0.08532340824604034, 1.1281737089157104, 0.08452562987804413, -0.18990837037563324, -0.1818619817495346, -0.3682094216346741, 0.8684653639793396, -0.926413357257843, -0.022829938679933548, 0.6743348836898804, 0.11610550433397293, -0.2659751772880554, -0.5876699090003967, -0.4448494613170624, -0.04091370850801468, -0.26753491163253784, 0.12738072872161865, -0.08594007790088654, -0.12573398649692535, 0.5138496160507202, 0.7544897794723511, -0.6553752422332764, 0.026813602074980736, -0.5047805905342102, -0.18284828960895538, 0.9635745286941528, 0.34153592586517334, 0.1159999668598175, -0.5982434749603271, -0.27842363715171814, -0.36087286472320557, -0.4785522520542145, 0.1551743894815445, 0.5479215979576111, 0.3557824492454529, -0.6513397693634033, 0.8324286937713623, -0.5442061424255371, 0.5175080895423889, -0.10701075941324234, -0.21575544774532318, 0.8541150093078613, -0.5338594317436218, -0.2524646520614624, -0.003021115902811289, 1.114450454711914, 0.32748350501060486, 0.07304693758487701, 0.19993741810321808, -0.23076297342777252, -0.174649178981781, -0.0144058708101511, -0.8693454265594482, -0.3815207779407501, 0.34088462591171265, -0.6458218097686768, -0.4350700378417969, 0.24316559731960297, -1.012884497642517, -0.19613857567310333, -0.2965514659881592, 0.29522570967674255, -0.20723579823970795, -0.47506171464920044, -0.11503641307353973, -0.1596810221672058, 0.2014356404542923, 0.18868333101272583, -0.5863801836967468, 0.29346609115600586, 0.5762243270874023, 0.9907363653182983, -0.03997420892119408, -0.41810837388038635, -0.3549286723136902, -0.24351778626441956, -0.08408112078905106, 0.28974783420562744, -0.10062427818775177, -0.38479849696159363, -0.16816842555999756, 0.34272852540016174, -0.2875310480594635, -0.5804277658462524, 0.7208541035652161, -0.23166531324386597, 0.12152260541915894, -0.376800000667572, -0.44816336035728455, -0.21555043756961823, 0.38408175110816956, -0.6760771870613098, 1.3233152627944946, 0.36682891845703125, -0.915704071521759, 0.08257083594799042, -0.7418391108512878, -0.1836954951286316, 0.06489664316177368, 0.12247860431671143, -0.6226015686988831, -0.13612104952335358, 0.11897684633731842, 0.4493931531906128, -0.3003777861595154, 0.05877525359392166, -0.27211305499076843, -0.4556163549423218, 0.173129141330719, -0.2176538109779358, 1.0334266424179077, 0.23986881971359253, -0.5058802366256714, 0.15137067437171936, 
-0.9771056175231934, 0.15217863023281097, 0.3384566605091095, -0.5682361125946045, -0.21824732422828674, -0.2168153077363968, 0.1725119948387146, 0.18813998997211456, 0.513639509677887, -0.6098281145095825, 0.31530267000198364, -0.2568957507610321, 0.3902197778224945, 0.9391791224479675, 0.027903735637664795, 0.23252466320991516, -0.4551411271095276, 0.4879194498062134, -0.010787482373416424, 0.28699684143066406, 0.12865377962589264, -0.531339704990387, -0.792461633682251, -0.2827085554599762, 0.20613528788089752, 0.6675310730934143, -0.4719351828098297, 0.8210127353668213, -0.4019075930118561, -0.6987745761871338, -0.6900376677513123, 0.12004915624856949, 0.4852634072303772, 0.5753652453422546, 0.3649708330631256, -0.19732333719730377, -0.6629102826118469, -1.0133963823318481, -0.01406898908317089, -0.2315075397491455, 0.05510043725371361, 0.5112525224685669, 1.1928796768188477, -0.26710304617881775, 0.6219485998153687, -0.7780842781066895, -0.22187325358390808, -0.25022295117378235, 0.10697793960571289, 0.7400328516960144, 0.6038175225257874, 0.3103748857975006, -0.7112340927124023, -0.30303290486335754, -0.10034126043319702, -0.7806494235992432, -0.15195254981517792, -0.03125, -0.3693033754825592, 0.24333816766738892, -0.1321258395910263, -0.5962547063827515, 0.6005955338478088, 0.5065457820892334, -0.6128435730934143, 0.7413554787635803, -0.08729050308465958, 0.5211900472640991, -1.2135266065597534, 0.2555876076221466, 0.06471063196659088, 0.02661467157304287, -0.37403932213783264, -0.08406387269496918, 0.022624367848038673, 0.2712520360946655, -0.4237823486328125, 0.601681113243103, -0.45321574807167053, -0.17875534296035767, -0.009405617602169514, 0.13043735921382904, -0.08034142851829529, 0.5724611878395081, -0.3018399775028229, 0.7591755390167236, 0.49434730410575867, -0.4474746286869049, 0.4675219655036926, 0.5854514241218567, -0.5467836260795593, 0.1646655946969986, -0.5553109049797058, 0.01386671420186758, 0.2859915792942047, 0.14854995906352997, -0.8894241452217102, -0.4298211336135864, 0.5223696827888489, -0.566213071346283, 0.13967637717723846, -0.4026886224746704, -0.6155039668083191, -0.5639078617095947, -0.4901113212108612, 0.3451715409755707, 0.6126915812492371, -0.5495848059654236, 0.28886207938194275, 0.34837695956230164, 0.0745180994272232, -0.5756582021713257, -0.6823002696037292, -0.14469628036022186, -0.33099105954170227, -0.6220422387123108, 0.4184828996658325, -0.06589269638061523, -0.3410171568393707, 0.0652211606502533, -0.07063653320074081, -0.033469971269369125, 0.21995384991168976, 0.39628714323043823, 0.516905665397644, -0.08423197269439697, -0.29490986466407776, -0.24470357596874237, -0.07323363423347473, 0.11926691234111786, 0.08133561909198761, 0.67569500207901, -0.23675164580345154, -0.28288453817367554, -0.4441695213317871, 0.10633698105812073, 0.540212094783783, -0.29586976766586304, 0.7879899144172668, 0.68165123462677, -0.3769921362400055, 0.022001206874847412, -0.34239962697029114, 0.030902117490768433, -0.48794567584991455, 0.34103548526763916, -0.36928266286849976, -0.6987840533256531, 0.8268923759460449, 0.19447432458400726, 0.10107669234275818, 0.751631498336792, 0.6082081198692322, 0.07997412979602814, 0.7939284443855286, 0.1369713693857193, -0.21683694422245026, 0.5606324076652527, -0.8658736348152161, -0.02041993848979473, -1.176640272140503, -0.37299615144729614, -0.5497934818267822, -0.4433319568634033, -0.954584538936615, -0.28443995118141174, 0.2195177972316742, 0.2914799153804779, -0.5617571473121643, 0.4575830101966858, 
-0.6948075890541077, 0.1633003056049347, 0.7090826034545898, 0.28276628255844116, 0.0909632071852684, -0.03429831191897392, 0.038952119648456573, 0.20729295909404755, -0.453570157289505, -0.42979860305786133, 1.3511079549789429, 0.30665820837020874, 0.6628812551498413, 0.03245076909661293, 1.0673987865447998, 0.24495752155780792, 0.2717706561088562, -0.6165074110031128, 0.6137382984161377, -0.0020667409989982843, -0.44513922929763794, -0.23115980625152588, -0.6817078590393066, -0.8984444737434387, 0.27659955620765686, -0.05739690363407135, -0.9474512934684753, 0.08760477602481842, -0.021817604079842567, -0.10031970590353012, 0.41985780000686646, -0.5377429127693176, 0.8474274277687073, -0.23803389072418213, -0.48677533864974976, -0.06763055920600891, -0.7038277387619019, 0.4092332124710083, 0.2058664858341217, 0.2892075181007385, -0.2688950300216675, 0.1617809385061264, 1.2233558893203735, -0.6279042959213257, 0.6655849814414978, -0.27704501152038574, 0.08992638438940048, 0.4541323781013489, -0.290885865688324, 0.5645814538002014, 0.015658186748623848, -0.2109987735748291, 0.4050545394420624, -0.16299577057361603, -0.3223065137863159, -0.2592713236808777, 0.9742583632469177, -0.9064217209815979, -0.3871649503707886, -0.529716432094574, -0.5831742286682129, 0.255758672952652, 0.2660602629184723, 0.35162973403930664, 0.32256412506103516, 0.05713322013616562, 0.16896675527095795, 0.3457431495189667, -0.19045113027095795, 0.573826014995575, 0.36795395612716675, -0.09322654455900192, -0.7415968179702759, 0.6771743893623352, 0.19294743239879608, 0.05458284914493561, 0.1301630139350891, 0.1428115963935852, -0.5069975852966309, -0.48615339398384094, -0.4510136544704437, 0.32028505206108093, -0.5528160333633423, -0.2910853922367096, -0.4278365969657898, -0.22795817255973816, -0.37528473138809204, 0.09911613911390305, -0.37368297576904297, -0.445538729429245, -0.41501715779304504, -0.22942599654197693, 0.6270721554756165, 0.5217413902282715, -0.27356600761413574, 0.31206756830215454, -0.8620715737342834, 0.27792996168136597, -0.1925032138824463, 0.37654027342796326, -0.08674197643995285, -0.5598151683807373, -0.4263298511505127, 0.26341718435287476, -0.43717634677886963, -0.994924783706665, 0.6049056053161621, -0.051013391464948654, 0.6395654678344727, 0.02040017582476139, 0.08615507185459137, 0.7593765258789062, -0.14416919648647308, 1.0125600099563599, -0.008388074114918709, -0.78599613904953, 0.7999725341796875, -0.3705260753631592, 0.13863441348075867, 0.5969674587249756, 0.1552542895078659, -0.47708627581596375, -0.18457725644111633, -0.8757484555244446, -1.167730689048767, 0.9749181270599365, 0.5377377271652222, -0.32674726843833923, 0.1581472009420395, 0.4171200394630432, -0.1312180608510971, 0.18611569702625275, -0.6303915977478027, -0.6746031641960144, -0.06967328488826752, -0.369750052690506, -0.011403609998524189, -0.024165714159607887, -0.4302510619163513, -0.549065887928009, 0.9484778046607971, 0.001552509842440486, 0.39773446321487427, 0.2926935851573944, -0.13314276933670044, -0.10060808807611465, 0.28072309494018555, 0.3322255313396454, 0.720792293548584, -0.3676655888557434, -0.09870368242263794, 0.2019069492816925, -0.635405421257019, 0.11132362484931946, 0.311271071434021, -0.0904839038848877, -0.09350352734327316, 0.563950777053833, 0.9338065385818481, 0.04173951968550682, -0.38223299384117126, 0.4707691967487335, 0.09490154683589935, -0.3335597813129425, -0.475534051656723, 0.1313285529613495, -0.059108659625053406, 0.3488132953643799, 0.40164482593536377, -0.10546771436929703, 
0.04305681586265564, -0.36416879296302795, 0.2923518121242523, 0.20871083438396454, -0.025967899709939957, -0.3314865231513977, 0.5383429527282715, -0.009516164660453796, -0.36244890093803406, 0.9194030165672302, -0.22959890961647034, -0.658577561378479, 1.1132653951644897, 0.33531683683395386, 0.9496937394142151, -0.07930231094360352, 0.1288069486618042, 0.7196319699287415, 0.2351059913635254, -0.2145770937204361, 0.6607727408409119, 0.0836055800318718, -0.561122477054596, -0.21961016952991486, -0.8344337344169617, -0.21952545642852783, 0.3951260447502136, -1.0503671169281006, 0.30938830971717834, -0.08811505138874054, -0.19351665675640106, -0.16191276907920837, 0.4331827163696289, -0.8534032106399536, 0.22496351599693298, -0.014854863286018372, 0.8399582505226135, -1.027009129524231, 0.7025113105773926, 0.799988865852356, -0.550093948841095, -0.9519218802452087, -0.15049830079078674, -0.006477581337094307, -0.6713295578956604, 0.5036860704421997, 0.23535501956939697, 0.3558737337589264, -0.1402580887079239, -0.5657035708427429, -1.0159679651260376, 1.545746088027954, 0.05754541978240013, -0.6651656031608582, 0.10090164095163345, 0.0678410604596138, 0.3519948422908783, -0.20990467071533203, 0.4687173366546631, 0.7790195345878601, 0.7485611438751221, -0.05683641508221626, -0.9311771392822266, 0.3641926944255829, -0.46162861585617065, -0.1549471914768219, 0.35671329498291016, -0.8369638919830322, 1.1451165676116943, -0.18178312480449677, 0.013576431199908257, 0.05409281328320503, 0.38643476366996765, 0.5435535907745361, 0.32764923572540283, 0.4316056966781616, 0.7980131506919861, 0.7423543334007263, -0.4034334421157837, 0.9989139437675476, -0.3100228011608124, 0.8655610680580139, 1.076837420463562, 0.0009894809918478131, 0.7396857142448425, 0.36555078625679016, -0.4381439685821533, 0.5158196091651917, 0.8221564888954163, -0.47124624252319336, 0.4479159414768219, 0.2726423144340515, -0.09777572751045227, -0.03145638480782509, 0.08456223458051682, -0.41386452317237854, 0.5091357231140137, 0.08661668002605438, -0.5870875120162964, -0.1521192342042923, -0.30592676997184753, 0.13541088998317719, -0.2583690881729126, -0.26794907450675964, 0.5172058939933777, -0.022558096796274185, -0.584842324256897, 0.716071605682373, 0.02012459561228752, 0.7458023428916931, -0.5616667866706848, -0.20413005352020264, -0.13895733654499054, 0.3323853313922882, -0.5491089820861816, -0.9368191957473755, 0.1999913603067398, 0.08819010108709335, -0.21221111714839935, -0.14871078729629517, 0.6677205562591553, -0.3479576110839844, -0.5459789037704468, 0.3573145866394043, 0.486237496137619, 0.24348542094230652, 0.03890720009803772, -0.856605589389801, 0.09842217713594437, 0.23827771842479706, -0.8282564878463745, 0.3869326710700989, 0.19793573021888733, 0.01705438271164894, 0.5423666834831238, 0.7287787199020386, 0.11151555180549622, 0.03042025864124298, -0.028044093400239944, 1.0459179878234863, -0.8167687654495239, -0.37369808554649353, -0.9438585042953491, 0.8333510756492615, -0.2660028040409088, -0.7051776647567749, 0.9140874743461609, 1.014098048210144, 0.7748123407363892, 0.1065623015165329, 0.8332861065864563, -0.4879254400730133, 0.3637157678604126, -0.3932065963745117, 0.9410963654518127, -0.6571915745735168, 0.21405966579914093, -0.19005773961544037, -0.7556685209274292, 0.10395219177007675, 0.7067722082138062, -0.2506829500198364, -0.01327758189290762, 0.5813266038894653, 0.8821658492088318, 0.04172627627849579, 0.14520424604415894, -0.1549140363931656, 0.3546218276023865, 0.3475325107574463, 
0.6949862241744995, 0.698426365852356, -0.7523853182792664, 0.3518974781036377, -0.7037477493286133, -0.41468048095703125, -0.08359330147504807, -0.62163907289505, -0.74078768491745, -0.5098918080329895, -0.40746980905532837, -0.6033154726028442, -0.030144408345222473, 1.0276713371276855, 0.4647636115550995, -0.8326348662376404, -0.3707410991191864, -0.07979574054479599, 0.245766282081604, -0.2542578876018524, -0.3553381562232971, 0.651094377040863, 0.020292233675718307, -0.8045445084571838, 0.2908843457698822, -0.23762084543704987, -0.14798085391521454, 0.008529026061296463, -0.22749857604503632, -0.39408013224601746, -0.3509306013584137, 0.35933154821395874, 0.09709098190069199, -0.5813319087028503, -0.30766814947128296, -0.1222318485379219, -0.07620057463645935, 0.26786503195762634, 0.38376009464263916, -0.5364067554473877, -0.03656501695513725, 0.7134667038917542, 0.165912926197052, 0.6949785351753235, 0.08556864410638809, 0.21548569202423096, -0.6190409660339355, -0.014976555481553078, 0.10595188289880753, 0.555740475654602, 0.20730483531951904, -0.47607576847076416, 0.9031625390052795, 0.39139819145202637, -0.74466472864151, -0.8959123492240906, -0.1335524618625641, -1.0682504177093506, -0.02254345454275608, 1.489992618560791, -0.3688645362854004, -0.44102972745895386, 0.15127915143966675, -0.24040454626083374, 0.4804202616214752, -0.6343911290168762, 0.5564875602722168, 0.7308028340339661, -0.26353511214256287, 0.04358118027448654, -0.6367486119270325, 0.36288982629776, 0.08184865117073059, -0.972429096698761, 0.06876421719789505, 0.44401228427886963, 0.5065222978591919, 0.21613140404224396, 0.6629869937896729, -0.09230413287878036, -0.05958271026611328, -0.0009456814732402563, 0.22572341561317444, -0.31218793988227844, -0.12696868181228638, -0.2774163782596588, 0.12639521062374115, -0.3073755204677582, -0.6482722163200378 ]
open-llm-leaderboard/details_maywell__Synatra-V0.1-7B
open-llm-leaderboard
2023-10-23T11:25:26Z
201
0
[ "region:us" ]
null
2023-10-10T04:31:22Z
--- pretty_name: Evaluation run of maywell/Synatra-V0.1-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [maywell/Synatra-V0.1-7B](https://huggingface.co/maywell/Synatra-V0.1-7B) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_maywell__Synatra-V0.1-7B\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-23T11:25:13.204412](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__Synatra-V0.1-7B/blob/main/results_2023-10-23T11-25-13.204412.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.32246224832214765,\n\ \ \"em_stderr\": 0.004786806140711669,\n \"f1\": 0.3963055788590608,\n\ \ \"f1_stderr\": 0.004634063813539812,\n \"acc\": 0.46089483255174657,\n\ \ \"acc_stderr\": 0.011702308149823175\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.32246224832214765,\n \"em_stderr\": 0.004786806140711669,\n\ \ \"f1\": 0.3963055788590608,\n \"f1_stderr\": 0.004634063813539812\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.19408642911296436,\n \ \ \"acc_stderr\": 0.010893918308192417\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7277032359905288,\n \"acc_stderr\": 0.012510697991453932\n\ \ }\n}\n```" repo_url: https://huggingface.co/maywell/Synatra-V0.1-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|arc:challenge|25_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-10-10T04-30-58.971713.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_23T11_25_13.204412 path: - '**/details_harness|drop|3_2023-10-23T11-25-13.204412.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-23T11-25-13.204412.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_23T11_25_13.204412 path: - '**/details_harness|gsm8k|5_2023-10-23T11-25-13.204412.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-23T11-25-13.204412.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hellaswag|10_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T04-30-58.971713.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T04-30-58.971713.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-10T04-30-58.971713.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T04-30-58.971713.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_10_10T04_30_58.971713 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-management|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-virology|5_2023-10-10T04-30-58.971713.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T04-30-58.971713.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_10_10T04_30_58.971713 path: - '**/details_harness|truthfulqa:mc|0_2023-10-10T04-30-58.971713.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-10-10T04-30-58.971713.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_23T11_25_13.204412 path: - '**/details_harness|winogrande|5_2023-10-23T11-25-13.204412.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-23T11-25-13.204412.parquet' - config_name: results data_files: - split: 2023_10_10T04_30_58.971713 path: - results_2023-10-10T04-30-58.971713.parquet - split: 2023_10_23T11_25_13.204412 path: - results_2023-10-23T11-25-13.204412.parquet - split: latest path: - results_2023-10-23T11-25-13.204412.parquet --- # Dataset Card for Evaluation run of maywell/Synatra-V0.1-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/maywell/Synatra-V0.1-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [maywell/Synatra-V0.1-7B](https://huggingface.co/maywell/Synatra-V0.1-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_maywell__Synatra-V0.1-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T11:25:13.204412](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__Synatra-V0.1-7B/blob/main/results_2023-10-23T11-25-13.204412.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.32246224832214765, "em_stderr": 0.004786806140711669, "f1": 0.3963055788590608, "f1_stderr": 0.004634063813539812, "acc": 0.46089483255174657, "acc_stderr": 0.011702308149823175 }, "harness|drop|3": { "em": 0.32246224832214765, "em_stderr": 0.004786806140711669, "f1": 0.3963055788590608, "f1_stderr": 0.004634063813539812 }, "harness|gsm8k|5": { "acc": 0.19408642911296436, "acc_stderr": 0.010893918308192417 }, "harness|winogrande|5": { "acc": 0.7277032359905288, "acc_stderr": 0.012510697991453932 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
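As a usage sketch complementing the loader snippet in the Dataset Summary above, the aggregated metrics can also be read from the `results` configuration declared in the YAML header. This is only a minimal sketch assuming the standard `datasets` API and the split names listed above; it prints the dataset object rather than assuming any particular column layout of the results parquet files.

```python
from datasets import load_dataset

# The "results" configuration aggregates the per-task metrics of each run;
# its "latest" split points at the most recent results parquet file.
results = load_dataset(
    "open-llm-leaderboard/details_maywell__Synatra-V0.1-7B",
    "results",
    split="latest",
)

print(results)     # column names and number of rows
print(results[0])  # first row of the latest aggregated results
```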
christykoh/imdb_pt
christykoh
2023-04-05T16:28:11Z
200
0
[ "region:us" ]
null
2023-04-05T16:27:52Z
--- dataset_info: features: - name: text dtype: string - name: label dtype: class_label: names: '0': negativo '1': positivo splits: - name: train num_bytes: 33225773 num_examples: 25000 - name: test num_bytes: 6503491 num_examples: 5000 - name: test_all num_bytes: 32638767 num_examples: 25000 download_size: 44980841 dataset_size: 72368031 --- # Dataset Card for "imdb_pt" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
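A minimal loading sketch for this card, assuming the standard `datasets` API; the split names (`train`, `test`, `test_all`) and label names follow the `dataset_info` block above.

```python
from datasets import load_dataset

# Load the Portuguese IMDB splits declared in dataset_info (train, test, test_all).
imdb_pt = load_dataset("christykoh/imdb_pt")

example = imdb_pt["train"][0]
print(example["text"][:100])

# Map the integer label back to its class name ("negativo" / "positivo").
label_feature = imdb_pt["train"].features["label"]
print(label_feature.int2str(example["label"]))
```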
dsfsi/vukuzenzele-sentence-aligned
dsfsi
2023-11-27T11:28:54Z
200
0
[ "task_categories:sentence-similarity", "task_categories:translation", "language:eng", "language:afr", "language:nbl", "language:xho", "language:zul", "language:sot", "language:nso", "language:tsn", "language:ssw", "language:ven", "language:tso", "license:cc-by-4.0", "multilingual", "government", "arxiv:2303.03750", "region:us" ]
[ "sentence-similarity", "translation" ]
2023-07-03T15:38:24Z
--- language: - eng - afr - nbl - xho - zul - sot - nso - tsn - ssw - ven - tso license: cc-by-4.0 task_categories: - sentence-similarity - translation pretty_name: The Vuk'uzenzele South African Multilingual Corpus tags: - multilingual - government arxiv: 2303.03750 configs: - config_name: afr-eng data_files: - split: train path: afr-eng/train-* - split: test path: afr-eng/test-* - split: eval path: afr-eng/eval-* - config_name: afr-nbl data_files: - split: train path: afr-nbl/train-* - split: test path: afr-nbl/test-* - split: eval path: afr-nbl/eval-* - config_name: afr-nso data_files: - split: train path: afr-nso/train-* - split: test path: afr-nso/test-* - split: eval path: afr-nso/eval-* - config_name: afr-sot data_files: - split: train path: afr-sot/train-* - split: test path: afr-sot/test-* - split: eval path: afr-sot/eval-* - config_name: afr-ssw data_files: - split: train path: afr-ssw/train-* - split: test path: afr-ssw/test-* - split: eval path: afr-ssw/eval-* - config_name: afr-tsn data_files: - split: train path: afr-tsn/train-* - split: test path: afr-tsn/test-* - split: eval path: afr-tsn/eval-* - config_name: afr-tso data_files: - split: train path: afr-tso/train-* - split: test path: afr-tso/test-* - split: eval path: afr-tso/eval-* - config_name: afr-ven data_files: - split: train path: afr-ven/train-* - split: test path: afr-ven/test-* - split: eval path: afr-ven/eval-* - config_name: afr-xho data_files: - split: train path: afr-xho/train-* - split: test path: afr-xho/test-* - split: eval path: afr-xho/eval-* - config_name: afr-zul data_files: - split: train path: afr-zul/train-* - split: test path: afr-zul/test-* - split: eval path: afr-zul/eval-* - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* - config_name: eng-nbl data_files: - split: train path: eng-nbl/train-* - split: test path: eng-nbl/test-* - split: eval path: eng-nbl/eval-* - config_name: eng-nso data_files: - split: train path: eng-nso/train-* - split: test path: eng-nso/test-* - split: eval path: eng-nso/eval-* - config_name: eng-sot data_files: - split: train path: eng-sot/train-* - split: test path: eng-sot/test-* - split: eval path: eng-sot/eval-* - config_name: eng-ssw data_files: - split: train path: eng-ssw/train-* - split: test path: eng-ssw/test-* - split: eval path: eng-ssw/eval-* - config_name: eng-tsn data_files: - split: train path: eng-tsn/train-* - split: test path: eng-tsn/test-* - split: eval path: eng-tsn/eval-* - config_name: eng-tso data_files: - split: train path: eng-tso/train-* - split: test path: eng-tso/test-* - split: eval path: eng-tso/eval-* - config_name: eng-ven data_files: - split: train path: eng-ven/train-* - split: test path: eng-ven/test-* - split: eval path: eng-ven/eval-* - config_name: eng-xho data_files: - split: train path: eng-xho/train-* - split: test path: eng-xho/test-* - split: eval path: eng-xho/eval-* - config_name: eng-zul data_files: - split: train path: eng-zul/train-* - split: test path: eng-zul/test-* - split: eval path: eng-zul/eval-* - config_name: nbl-nso data_files: - split: train path: nbl-nso/train-* - split: test path: nbl-nso/test-* - split: eval path: nbl-nso/eval-* - config_name: nbl-sot data_files: - split: train path: nbl-sot/train-* - split: test path: nbl-sot/test-* - split: eval path: nbl-sot/eval-* - config_name: nbl-ssw data_files: - split: train path: nbl-ssw/train-* - split: test path: nbl-ssw/test-* - split: eval path: nbl-ssw/eval-* - config_name: nbl-tsn data_files: - split: train
path: nbl-tsn/train-* - split: test path: nbl-tsn/test-* - split: eval path: nbl-tsn/eval-* - config_name: nbl-tso data_files: - split: train path: nbl-tso/train-* - split: test path: nbl-tso/test-* - split: eval path: nbl-tso/eval-* - config_name: nbl-ven data_files: - split: train path: nbl-ven/train-* - split: test path: nbl-ven/test-* - split: eval path: nbl-ven/eval-* - config_name: nbl-xho data_files: - split: train path: nbl-xho/train-* - split: test path: nbl-xho/test-* - split: eval path: nbl-xho/eval-* - config_name: nbl-zul data_files: - split: train path: nbl-zul/train-* - split: test path: nbl-zul/test-* - split: eval path: nbl-zul/eval-* - config_name: nso-sot data_files: - split: train path: nso-sot/train-* - split: test path: nso-sot/test-* - split: eval path: nso-sot/eval-* - config_name: nso-ssw data_files: - split: train path: nso-ssw/train-* - split: test path: nso-ssw/test-* - split: eval path: nso-ssw/eval-* - config_name: nso-tsn data_files: - split: train path: nso-tsn/train-* - split: test path: nso-tsn/test-* - split: eval path: nso-tsn/eval-* - config_name: nso-tso data_files: - split: train path: nso-tso/train-* - split: test path: nso-tso/test-* - split: eval path: nso-tso/eval-* - config_name: nso-ven data_files: - split: train path: nso-ven/train-* - split: test path: nso-ven/test-* - split: eval path: nso-ven/eval-* - config_name: nso-xho data_files: - split: train path: nso-xho/train-* - split: test path: nso-xho/test-* - split: eval path: nso-xho/eval-* - config_name: nso-zul data_files: - split: train path: nso-zul/train-* - split: test path: nso-zul/test-* - split: eval path: nso-zul/eval-* - config_name: sot-ssw data_files: - split: train path: sot-ssw/train-* - split: test path: sot-ssw/test-* - split: eval path: sot-ssw/eval-* - config_name: sot-tsn data_files: - split: train path: sot-tsn/train-* - split: test path: sot-tsn/test-* - split: eval path: sot-tsn/eval-* - config_name: sot-tso data_files: - split: train path: sot-tso/train-* - split: test path: sot-tso/test-* - split: eval path: sot-tso/eval-* - config_name: sot-ven data_files: - split: train path: sot-ven/train-* - split: test path: sot-ven/test-* - split: eval path: sot-ven/eval-* - config_name: sot-xho data_files: - split: train path: sot-xho/train-* - split: test path: sot-xho/test-* - split: eval path: sot-xho/eval-* - config_name: sot-zul data_files: - split: train path: sot-zul/train-* - split: test path: sot-zul/test-* - split: eval path: sot-zul/eval-* - config_name: ssw-tsn data_files: - split: train path: ssw-tsn/train-* - split: test path: ssw-tsn/test-* - split: eval path: ssw-tsn/eval-* - config_name: ssw-tso data_files: - split: train path: ssw-tso/train-* - split: test path: ssw-tso/test-* - split: eval path: ssw-tso/eval-* - config_name: ssw-ven data_files: - split: train path: ssw-ven/train-* - split: test path: ssw-ven/test-* - split: eval path: ssw-ven/eval-* - config_name: ssw-xho data_files: - split: train path: ssw-xho/train-* - split: test path: ssw-xho/test-* - split: eval path: ssw-xho/eval-* - config_name: ssw-zul data_files: - split: train path: ssw-zul/train-* - split: test path: ssw-zul/test-* - split: eval path: ssw-zul/eval-* - config_name: tsn-tso data_files: - split: train path: tsn-tso/train-* - split: test path: tsn-tso/test-* - split: eval path: tsn-tso/eval-* - config_name: tsn-ven data_files: - split: train path: tsn-ven/train-* - split: test path: tsn-ven/test-* - split: eval path: tsn-ven/eval-* - config_name: tsn-xho data_files: - split: train 
path: tsn-xho/train-* - split: test path: tsn-xho/test-* - split: eval path: tsn-xho/eval-* - config_name: tsn-zul data_files: - split: train path: tsn-zul/train-* - split: test path: tsn-zul/test-* - split: eval path: tsn-zul/eval-* - config_name: tso-ven data_files: - split: train path: tso-ven/train-* - split: test path: tso-ven/test-* - split: eval path: tso-ven/eval-* - config_name: tso-xho data_files: - split: train path: tso-xho/train-* - split: test path: tso-xho/test-* - split: eval path: tso-xho/eval-* - config_name: tso-zul data_files: - split: train path: tso-zul/train-* - split: test path: tso-zul/test-* - split: eval path: tso-zul/eval-* - config_name: ven-xho data_files: - split: train path: ven-xho/train-* - split: test path: ven-xho/test-* - split: eval path: ven-xho/eval-* - config_name: ven-zul data_files: - split: train path: ven-zul/train-* - split: test path: ven-zul/test-* - split: eval path: ven-zul/eval-* - config_name: xho-zul data_files: - split: train path: xho-zul/train-* - split: test path: xho-zul/test-* - split: eval path: xho-zul/eval-* dataset_info: - config_name: afr-eng features: - name: afr dtype: string - name: eng dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 793530 num_examples: 2660 - name: test num_bytes: 171644 num_examples: 570 - name: eval num_bytes: 172132 num_examples: 571 download_size: 757198 dataset_size: 1137306 - config_name: afr-nbl features: - name: afr dtype: string - name: nbl dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 281328 num_examples: 723 - name: test num_bytes: 57947 num_examples: 155 - name: eval num_bytes: 59996 num_examples: 155 download_size: 279950 dataset_size: 399271 - config_name: afr-nso features: - name: afr dtype: string - name: nso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 980475 num_examples: 2937 - name: test num_bytes: 203451 num_examples: 630 - name: eval num_bytes: 214623 num_examples: 630 download_size: 892392 dataset_size: 1398549 - config_name: afr-sot features: - name: afr dtype: string - name: sot dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 971841 num_examples: 2968 - name: test num_bytes: 211374 num_examples: 636 - name: eval num_bytes: 209697 num_examples: 636 download_size: 901006 dataset_size: 1392912 - config_name: afr-ssw features: - name: afr dtype: string - name: ssw dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 975744 num_examples: 3042 - name: test num_bytes: 209151 num_examples: 652 - name: eval num_bytes: 208877 num_examples: 653 download_size: 927666 dataset_size: 1393772 - config_name: afr-tsn features: - name: afr dtype: string - name: tsn dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1010506 num_examples: 2830 - name: test num_bytes: 218153 num_examples: 607 - name: eval num_bytes: 214373 num_examples: 607 download_size: 913596 dataset_size: 1443032 - config_name: afr-tso features: - name: afr dtype: string - name: tso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 980025 num_examples: 2952 - name: test num_bytes: 213355 num_examples: 633 - name: eval num_bytes: 211642 num_examples: 633 download_size: 902666 
dataset_size: 1405022 - config_name: afr-ven features: - name: afr dtype: string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 352031 num_examples: 830 - name: test num_bytes: 72702 num_examples: 178 - name: eval num_bytes: 75243 num_examples: 178 download_size: 323825 dataset_size: 499976 - config_name: afr-xho features: - name: afr dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 986062 num_examples: 3098 - name: test num_bytes: 205229 num_examples: 664 - name: eval num_bytes: 210379 num_examples: 665 download_size: 944334 dataset_size: 1401670 - config_name: afr-zul features: - name: afr dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 982102 num_examples: 3078 - name: test num_bytes: 208473 num_examples: 660 - name: eval num_bytes: 201824 num_examples: 660 download_size: 932565 dataset_size: 1392399 - config_name: default features: - name: nbl dtype: string - name: nso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 128131 num_examples: 315 - name: test num_bytes: 31826 num_examples: 79 download_size: 113394 dataset_size: 159957 - config_name: eng-nbl features: - name: eng dtype: string - name: nbl dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 78419 num_examples: 207 - name: test num_bytes: 16930 num_examples: 45 - name: eval num_bytes: 15202 num_examples: 45 download_size: 89654 dataset_size: 110551 - config_name: eng-nso features: - name: eng dtype: string - name: nso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 848347 num_examples: 2616 - name: test num_bytes: 183267 num_examples: 561 - name: eval num_bytes: 181802 num_examples: 561 download_size: 770909 dataset_size: 1213416 - config_name: eng-sot features: - name: eng dtype: string - name: sot dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 843173 num_examples: 2671 - name: test num_bytes: 181709 num_examples: 573 - name: eval num_bytes: 180602 num_examples: 573 download_size: 776145 dataset_size: 1205484 - config_name: eng-ssw features: - name: eng dtype: string - name: ssw dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 830354 num_examples: 2662 - name: test num_bytes: 175688 num_examples: 571 - name: eval num_bytes: 176734 num_examples: 571 download_size: 777951 dataset_size: 1182776 - config_name: eng-tsn features: - name: eng dtype: string - name: tsn dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 880306 num_examples: 2517 - name: test num_bytes: 190843 num_examples: 539 - name: eval num_bytes: 187728 num_examples: 540 download_size: 786563 dataset_size: 1258877 - config_name: eng-tso features: - name: eng dtype: string - name: tso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 849914 num_examples: 2623 - name: test num_bytes: 181181 num_examples: 562 - name: eval num_bytes: 176362 num_examples: 563 download_size: 773662 dataset_size: 1207457 - config_name: eng-ven features: - name: eng dtype: 
string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 113647 num_examples: 279 - name: test num_bytes: 26195 num_examples: 60 - name: eval num_bytes: 26121 num_examples: 60 download_size: 119271 dataset_size: 165963 - config_name: eng-xho features: - name: eng dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 827201 num_examples: 2662 - name: test num_bytes: 175023 num_examples: 571 - name: eval num_bytes: 176047 num_examples: 571 download_size: 784961 dataset_size: 1178271 - config_name: eng-zul features: - name: eng dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 815837 num_examples: 2646 - name: test num_bytes: 168769 num_examples: 567 - name: eval num_bytes: 177547 num_examples: 567 download_size: 767836 dataset_size: 1162153 - config_name: nbl-nso features: - name: nbl dtype: string - name: nso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 109438 num_examples: 275 - name: test num_bytes: 24000 num_examples: 59 - name: eval num_bytes: 26519 num_examples: 60 download_size: 118816 dataset_size: 159957 - config_name: nbl-sot features: - name: nbl dtype: string - name: sot dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 149593 num_examples: 365 - name: test num_bytes: 30656 num_examples: 78 - name: eval num_bytes: 32211 num_examples: 79 download_size: 152576 dataset_size: 212460 - config_name: nbl-ssw features: - name: nbl dtype: string - name: ssw dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 146544 num_examples: 387 - name: test num_bytes: 33410 num_examples: 83 - name: eval num_bytes: 32858 num_examples: 84 download_size: 157314 dataset_size: 212812 - config_name: nbl-tsn features: - name: nbl dtype: string - name: tsn dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 131856 num_examples: 302 - name: test num_bytes: 31961 num_examples: 65 - name: eval num_bytes: 29676 num_examples: 65 download_size: 139315 dataset_size: 193493 - config_name: nbl-tso features: - name: nbl dtype: string - name: tso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 127433 num_examples: 296 - name: test num_bytes: 24654 num_examples: 63 - name: eval num_bytes: 23290 num_examples: 64 download_size: 127532 dataset_size: 175377 - config_name: nbl-ven features: - name: nbl dtype: string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 976911 num_examples: 2660 - name: test num_bytes: 211536 num_examples: 570 - name: eval num_bytes: 207694 num_examples: 570 download_size: 885066 dataset_size: 1396141 - config_name: nbl-xho features: - name: nbl dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 149020 num_examples: 403 - name: test num_bytes: 33319 num_examples: 87 - name: eval num_bytes: 31809 num_examples: 87 download_size: 160427 dataset_size: 214148 - config_name: nbl-zul features: - name: nbl dtype: string - name: zul dtype: string - name: score dtype: 
float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 111905 num_examples: 289 - name: test num_bytes: 25799 num_examples: 62 - name: eval num_bytes: 22660 num_examples: 63 download_size: 124588 dataset_size: 160364 - config_name: nso-sot features: - name: nso dtype: string - name: sot dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1057851 num_examples: 3052 - name: test num_bytes: 226420 num_examples: 654 - name: eval num_bytes: 232934 num_examples: 655 download_size: 945243 dataset_size: 1517205 - config_name: nso-ssw features: - name: nso dtype: string - name: ssw dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1020738 num_examples: 2968 - name: test num_bytes: 219932 num_examples: 636 - name: eval num_bytes: 218161 num_examples: 637 download_size: 922981 dataset_size: 1458831 - config_name: nso-tsn features: - name: nso dtype: string - name: tsn dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1092885 num_examples: 2918 - name: test num_bytes: 238439 num_examples: 625 - name: eval num_bytes: 234644 num_examples: 626 download_size: 952272 dataset_size: 1565968 - config_name: nso-tso features: - name: nso dtype: string - name: tso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1045959 num_examples: 2956 - name: test num_bytes: 228677 num_examples: 634 - name: eval num_bytes: 226626 num_examples: 634 download_size: 925262 dataset_size: 1501262 - config_name: nso-ven features: - name: nso dtype: string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 155499 num_examples: 343 - name: test num_bytes: 35576 num_examples: 73 - name: eval num_bytes: 31381 num_examples: 74 download_size: 152424 dataset_size: 222456 - config_name: nso-xho features: - name: nso dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1022670 num_examples: 2959 - name: test num_bytes: 214850 num_examples: 634 - name: eval num_bytes: 212932 num_examples: 635 download_size: 929486 dataset_size: 1450452 - config_name: nso-zul features: - name: nso dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1014985 num_examples: 2998 - name: test num_bytes: 223825 num_examples: 643 - name: eval num_bytes: 219173 num_examples: 643 download_size: 926742 dataset_size: 1457983 - config_name: sot-ssw features: - name: sot dtype: string - name: ssw dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1036642 num_examples: 3075 - name: test num_bytes: 217328 num_examples: 659 - name: eval num_bytes: 222863 num_examples: 660 download_size: 950426 dataset_size: 1476833 - config_name: sot-tsn features: - name: sot dtype: string - name: tsn dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1118346 num_examples: 3019 - name: test num_bytes: 237826 num_examples: 647 - name: eval num_bytes: 235279 num_examples: 647 download_size: 981019 dataset_size: 1591451 - config_name: sot-tso features: - name: sot dtype: string - name: tso dtype: string - name: score dtype: float64 - name: 
__index_level_0__ dtype: int64 splits: - name: train num_bytes: 1057631 num_examples: 3027 - name: test num_bytes: 226229 num_examples: 649 - name: eval num_bytes: 222671 num_examples: 649 download_size: 943068 dataset_size: 1506531 - config_name: sot-ven features: - name: sot dtype: string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 201166 num_examples: 461 - name: test num_bytes: 44845 num_examples: 99 - name: eval num_bytes: 42607 num_examples: 99 download_size: 191947 dataset_size: 288618 - config_name: sot-xho features: - name: sot dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1042930 num_examples: 3098 - name: test num_bytes: 217327 num_examples: 664 - name: eval num_bytes: 223313 num_examples: 665 download_size: 964792 dataset_size: 1483570 - config_name: sot-zul features: - name: sot dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1017097 num_examples: 3079 - name: test num_bytes: 223761 num_examples: 660 - name: eval num_bytes: 227514 num_examples: 660 download_size: 949761 dataset_size: 1468372 - config_name: ssw-tsn features: - name: ssw dtype: string - name: tsn dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1124400 num_examples: 3110 - name: test num_bytes: 238160 num_examples: 666 - name: eval num_bytes: 246176 num_examples: 667 download_size: 1012570 dataset_size: 1608736 - config_name: ssw-tso features: - name: ssw dtype: string - name: tso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1069540 num_examples: 3142 - name: test num_bytes: 237608 num_examples: 673 - name: eval num_bytes: 231657 num_examples: 674 download_size: 980833 dataset_size: 1538805 - config_name: ssw-ven features: - name: ssw dtype: string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 170825 num_examples: 401 - name: test num_bytes: 34774 num_examples: 86 - name: eval num_bytes: 39434 num_examples: 87 download_size: 170522 dataset_size: 245033 - config_name: ssw-xho features: - name: ssw dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1047037 num_examples: 3193 - name: test num_bytes: 227505 num_examples: 684 - name: eval num_bytes: 219981 num_examples: 685 download_size: 992683 dataset_size: 1494523 - config_name: ssw-zul features: - name: ssw dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1054772 num_examples: 3255 - name: test num_bytes: 231524 num_examples: 698 - name: eval num_bytes: 223701 num_examples: 698 download_size: 997182 dataset_size: 1509997 - config_name: tsn-tso features: - name: tsn dtype: string - name: tso dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1141163 num_examples: 3023 - name: test num_bytes: 244100 num_examples: 648 - name: eval num_bytes: 242886 num_examples: 648 download_size: 998631 dataset_size: 1628149 - config_name: tsn-ven features: - name: tsn dtype: string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ 
dtype: int64 splits: - name: train num_bytes: 175353 num_examples: 361 - name: test num_bytes: 39141 num_examples: 77 - name: eval num_bytes: 37453 num_examples: 78 download_size: 165408 dataset_size: 251947 - config_name: tsn-xho features: - name: tsn dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1095514 num_examples: 2989 - name: test num_bytes: 231544 num_examples: 640 - name: eval num_bytes: 227856 num_examples: 641 download_size: 986295 dataset_size: 1554914 - config_name: tsn-zul features: - name: tsn dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1090114 num_examples: 3038 - name: test num_bytes: 232488 num_examples: 651 - name: eval num_bytes: 240758 num_examples: 651 download_size: 989654 dataset_size: 1563360 - config_name: tso-ven features: - name: tso dtype: string - name: ven dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 171428 num_examples: 375 - name: test num_bytes: 33029 num_examples: 80 - name: eval num_bytes: 38079 num_examples: 81 download_size: 163896 dataset_size: 242536 - config_name: tso-xho features: - name: tso dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1049926 num_examples: 3066 - name: test num_bytes: 224708 num_examples: 657 - name: eval num_bytes: 221699 num_examples: 657 download_size: 967978 dataset_size: 1496333 - config_name: tso-zul features: - name: tso dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1059314 num_examples: 3106 - name: test num_bytes: 224935 num_examples: 666 - name: eval num_bytes: 225248 num_examples: 666 download_size: 970505 dataset_size: 1509497 - config_name: ven-xho features: - name: ven dtype: string - name: xho dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 164305 num_examples: 401 - name: test num_bytes: 36290 num_examples: 86 - name: eval num_bytes: 35520 num_examples: 87 download_size: 165177 dataset_size: 236115 - config_name: ven-zul features: - name: ven dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 138178 num_examples: 336 - name: test num_bytes: 32949 num_examples: 72 - name: eval num_bytes: 30697 num_examples: 72 download_size: 143542 dataset_size: 201824 - config_name: xho-zul features: - name: xho dtype: string - name: zul dtype: string - name: score dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 1062980 num_examples: 3276 - name: test num_bytes: 226001 num_examples: 702 - name: eval num_bytes: 225893 num_examples: 703 download_size: 1011124 dataset_size: 1514874 --- # The Vuk'uzenzele South African Multilingual Corpus Github: [https://github.com/dsfsi/vukuzenzele-nlp/](https://github.com/dsfsi/vukuzenzele-nlp/) Zenodo: [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.7598539.svg)](https://doi.org/10.5281/zenodo.7598539) Arxiv Preprint: [![arXiv](https://img.shields.io/badge/arXiv-2303.03750-b31b1b.svg)](https://arxiv.org/abs/2303.03750) Give Feedback 📑: [DSFSI Resource Feedback 
Form](https://docs.google.com/forms/d/e/1FAIpQLSf7S36dyAUPx2egmXbFpnTBuzoRulhL5Elu-N1eoMhaO7v10w/formResponse)

# About

The dataset was obtained from the South African government magazine Vuk'uzenzele, created by the [Government Communication and Information System (GCIS)](https://www.gcis.gov.za/). The original raw PDFs were obtained from the [Vuk'uzenzele website](https://www.vukuzenzele.gov.za/).

The datasets contain government magazine editions in 11 languages, namely:

| Language   | Code  | Language   | Code  |
|------------|-------|------------|-------|
| English    | (eng) | Sepedi     | (nso) |
| Afrikaans  | (afr) | Setswana   | (tsn) |
| isiNdebele | (nbl) | Siswati    | (ssw) |
| isiXhosa   | (xho) | Tshivenda  | (ven) |
| isiZulu    | (zul) | Xitsonga   | (tso) |
| Sesotho    | (sot) |            |       |

## Available pairings

The alignment direction is bidirectional, i.e. xho-zul is the same pairing as zul-xho:

afr-eng; afr-nbl; afr-nso; afr-sot; afr-ssw; afr-tsn; afr-tso; afr-ven; afr-xho; afr-zul
eng-nbl; eng-nso; eng-sot; eng-ssw; eng-tsn; eng-tso; eng-ven; eng-xho; eng-zul
nbl-nso; nbl-sot; nbl-ssw; nbl-tsn; nbl-tso; nbl-ven; nbl-xho; nbl-zul
nso-sot; nso-ssw; nso-tsn; nso-tso; nso-ven; nso-xho; nso-zul
sot-ssw; sot-tsn; sot-tso; sot-ven; sot-xho; sot-zul
ssw-tsn; ssw-tso; ssw-ven; ssw-xho; ssw-zul
tsn-tso; tsn-ven; tsn-xho; tsn-zul
tso-ven; tso-xho; tso-zul
ven-xho; ven-zul
xho-zul

# Disclaimer

This dataset contains machine-readable data extracted from PDF documents, from https://www.vukuzenzele.gov.za/, provided by the Government Communication Information System (GCIS). While efforts were made to ensure the accuracy and completeness of this data, there may be errors or discrepancies between the original publications and this dataset. No warranties, guarantees or representations are given in relation to the information contained in the dataset.

The members of the Data Science for Societal Impact Research Group bear no responsibility and/or liability for any such errors or discrepancies in this dataset. The Government Communication Information System (GCIS) bears no responsibility and/or liability for any such errors or discrepancies in this dataset. It is recommended that users verify all information contained herein before making decisions based upon this information.

# Datasets

The datasets consist of pairwise sentence-aligned data. There are 55 distinct datasets of paired sentences. The data is obtained by comparing [LASER](https://github.com/facebookresearch/LASER) embeddings of sentence tokens between two languages. If the similarity is high, the sentences are deemed semantic equivalents of one another and the pair is written to the output.

Naming convention: the files in the pairwise_sentence_aligned folder are named `aligned-{src_lang_code}-{tgt_lang_code}.csv`. For example, `aligned-afr-zul.csv` contains the aligned sentences between Afrikaans and isiZulu. The data is in .csv format and the columns are `src_text`, `tgt_text`, `cosine_score`, where:

- `src_text` is the source sentence
- `tgt_text` is the target sentence
- `cosine_score` is the cosine similarity score obtained by comparing the sentence embeddings; it ranges from 0 to 1

**Note:** The notions of source (src) and target (tgt) are only needed to distinguish the two languages in an aligned pair; the sentence semantics are bidirectional (hallo <-> sawubona). A minimal loading sketch is given below, after the citation.

# Citation

Vukosi Marivate, Andani Madodonga, Daniel Njini, Richard Lastrucci, Isheanesu Dzingirai, Jenalea Rajab.
**The Vuk'uzenzele South African Multilingual Corpus**, 2023

> @dataset{marivate_vukosi_2023_7598540,
>   author    = {Marivate, Vukosi and Njini, Daniel and Madodonga, Andani and Lastrucci, Richard and Dzingirai, Isheanesu and Rajab, Jenalea},
>   title     = {The Vuk'uzenzele South African Multilingual Corpus},
>   month     = feb,
>   year      = 2023,
>   publisher = {Zenodo},
>   doi       = {10.5281/zenodo.7598539},
>   url       = {https://doi.org/10.5281/zenodo.7598539}
> }

### Licence

* Licence for Data - [CC BY 4.0](LICENSE.md)
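To make the file layout described in the Datasets section concrete, here is a minimal loading sketch. It assumes the aligned CSV files (for example `aligned-afr-zul.csv`) have been downloaded locally from the project's GitHub or Zenodo release and that pandas is installed; the 0.7 similarity cut-off is purely illustrative, not a threshold prescribed by the dataset.

```python
import pandas as pd

# Each aligned file follows aligned-{src_lang_code}-{tgt_lang_code}.csv and has
# the columns src_text, tgt_text and cosine_score described above.
pairs = pd.read_csv("aligned-afr-zul.csv")

# Keep only the most confidently aligned pairs (illustrative threshold).
confident = pairs[pairs["cosine_score"] >= 0.7]

for _, row in confident.head(3).iterrows():
    print(f"{row['cosine_score']:.2f} | {row['src_text']} <-> {row['tgt_text']}")
```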
[ -0.3470764458179474, -0.2786097824573517, 0.36486339569091797, 0.36671867966651917, -0.38448959589004517, -0.1673501878976822, -0.2062310427427292, -0.2694636881351471, 0.5848760008811951, 0.5564826130867004, -0.47268399596214294, -0.6824679374694824, -0.5806552171707153, 0.3103688955307007, -0.12592683732509613, 0.6862095594406128, -0.05276291072368622, 0.024142684414982796, -0.0411725714802742, -0.3698981702327728, -0.3408990502357483, -0.5654722452163696, -0.0726231187582016, -0.16971004009246826, 0.4357837736606598, 0.45213937759399414, 0.7873846888542175, 0.3251604437828064, 0.19238895177841187, 0.3090185225009918, -0.127231627702713, 0.19295333325862885, -0.26370686292648315, -0.16434846818447113, -0.041316691786050797, -0.2265172004699707, -0.25224408507347107, -0.04871993884444237, 0.542091965675354, 0.7733358144760132, -0.23977667093276978, 0.1463959962129593, 0.15596993267536163, 0.7370968461036682, -0.6418999433517456, 0.3756084144115448, -0.24763207137584686, -0.1909170001745224, -0.3745516240596771, 0.02436770498752594, -0.3194604218006134, -0.467443585395813, -0.1037062555551529, -0.5771430730819702, -0.11975347250699997, -0.2173006385564804, 1.4247514009475708, -0.09997113049030304, -0.3510596454143524, -0.19192945957183838, -0.3764559030532837, 0.6337343454360962, -0.8271889090538025, 0.3188190162181854, 0.5355358719825745, 0.07897696644067764, -0.2921375036239624, -0.6674576997756958, -0.6954405307769775, 0.05372539535164833, -0.3764086663722992, 0.32675108313560486, -0.4261411130428314, -0.14719948172569275, 0.14760571718215942, 0.19117306172847748, -0.5862582921981812, -0.07173573225736618, -0.5834833383560181, -0.3277621567249298, 0.788292646408081, -0.04501020163297653, 0.46063050627708435, -0.9755822420120239, -0.45539581775665283, -0.08122913539409637, -0.5791441798210144, 0.11052023619413376, 0.7966877222061157, 0.541961669921875, -0.4893006384372711, 0.8726078271865845, -0.15019482374191284, 0.7692457437515259, -0.23815776407718658, 0.16863903403282166, 0.9200459122657776, -0.9252201318740845, -0.12688520550727844, 0.11771752685308456, 1.0956472158432007, 0.5143734812736511, 0.3488779664039612, -0.1219707578420639, 0.059842176735401154, 0.2223973572254181, 0.17018455266952515, -0.6314646601676941, -0.04231145977973938, 0.18923547863960266, -0.38645172119140625, -0.11319341510534286, 0.35800379514694214, -0.9446535706520081, 0.014440888538956642, -0.3468443751335144, 0.3951031267642975, -0.5320517420768738, -0.4618966281414032, 0.09644387662410736, -0.04609533026814461, 0.2952982187271118, -0.07546121627092361, -0.8635894656181335, -0.020916322246193886, 0.35511961579322815, 0.8921430706977844, 0.05247395485639572, -0.3375781178474426, 0.05692915618419647, 0.13787022233009338, -0.24475951492786407, 0.6886239647865295, -0.4615594446659088, -0.5326827168464661, 0.12306387722492218, 0.16457821428775787, -0.3387959599494934, -0.32935631275177, 0.8243004679679871, -0.16451376676559448, 0.24489419162273407, -0.5961037278175354, -0.2792622745037079, -0.318302720785141, -0.1492692530155182, -0.570217490196228, 1.411238431930542, 0.5012499690055847, -0.6794388890266418, 0.448641300201416, -0.5018802285194397, -0.509702742099762, -0.0503232441842556, -0.4298800528049469, -0.40404585003852844, -0.24108311533927917, 0.45960161089897156, 0.41536760330200195, -0.2670442759990692, 0.3839045763015747, -0.2861853241920471, -0.27348417043685913, 0.002880085026845336, -0.15520896017551422, 1.3603876829147339, 0.22045034170150757, -0.37478914856910706, 0.09352951496839523, 
-1.0572603940963745, 0.17710429430007935, 0.15129104256629944, -0.20429803431034088, -0.42743903398513794, -0.2260666787624359, 0.15190039575099945, 0.4923572242259979, 0.1226106807589531, -0.7898041605949402, 0.3125399947166443, -0.3904748857021332, 0.6532576680183411, 0.8478464484214783, -0.03706403821706772, 0.34578439593315125, -0.1990588903427124, 0.42523834109306335, 0.20412607491016388, 0.2512337863445282, 0.22566130757331848, -0.6325751543045044, -0.6172667145729065, -0.31784969568252563, 0.4279114007949829, 0.786041796207428, -0.8396114706993103, 0.741588830947876, -0.4824133813381195, -0.561887264251709, -0.6864300966262817, 0.11143836379051208, 0.24497194588184357, 0.47190701961517334, 0.21254099905490875, -0.17118924856185913, -0.7231174111366272, -1.2001994848251343, 0.03624004125595093, -0.21948179602622986, 0.04284059628844261, 0.4218269884586334, 0.6504727602005005, -0.26015007495880127, 1.0533579587936401, -0.4867909848690033, -0.40508338809013367, -0.17307884991168976, -0.004185355268418789, 0.40078338980674744, 0.2989392876625061, 1.0322903394699097, -1.0722484588623047, -0.4960608184337616, -0.20538048446178436, -0.98262619972229, -0.19955933094024658, 0.04425869509577751, -0.18065302073955536, 0.2840605676174164, 0.34607452154159546, -0.6534850597381592, 0.40253332257270813, 0.5949975252151489, -0.6205756068229675, 0.3987959325313568, -0.39989715814590454, 0.26651981472969055, -1.3108323812484741, 0.1619735211133957, -0.1271955817937851, 0.27418264746665955, -0.5475807785987854, -0.19744548201560974, 0.04128478839993477, -0.11903039366006851, -0.458187997341156, 0.35572683811187744, -0.6379320025444031, -0.16494090855121613, 0.2571694254875183, 0.4630448520183563, -0.15439102053642273, 0.486404150724411, -0.16716912388801575, 1.0886329412460327, 0.5835233926773071, -0.4527518153190613, 0.4068644642829895, 0.7157578468322754, -0.6597316861152649, 0.5911579728126526, -0.6385502815246582, -0.13374298810958862, -0.09108631312847137, 0.0431034155189991, -1.0144976377487183, -0.1037396714091301, 0.5290772318840027, -0.6686162948608398, 0.06841369718313217, -0.032592449337244034, -0.6168811321258545, -0.4652268886566162, -0.3000290095806122, -0.059359557926654816, 0.24258169531822205, -0.3286653757095337, 0.6338651776313782, 0.410767138004303, -0.3025849163532257, -0.9055759906768799, -1.1491888761520386, 0.27778664231300354, -0.007454308215528727, -0.937986433506012, 0.20804493129253387, 0.22307699918746948, -0.16363590955734253, 0.4601333737373352, 0.07452553510665894, 0.03896521031856537, 0.05222689360380173, 0.3333395719528198, 0.383076012134552, -0.2428426295518875, -0.02157609537243843, -0.0032040849328041077, -0.17880688607692719, -0.04941401258111, -0.04775184392929077, 0.7189059257507324, -0.02907966822385788, -0.1369439959526062, -0.2676655948162079, 0.5516876578330994, 0.5412363409996033, -0.40303516387939453, 1.2170090675354004, 0.6406666040420532, -0.23665085434913635, 0.27971771359443665, -0.39710694551467896, 0.14238367974758148, -0.39439669251441956, 0.21699798107147217, -0.4438025951385498, -0.6742913126945496, 0.759025514125824, 0.2541346848011017, -0.1118292510509491, 0.7892652750015259, 0.5439735054969788, 0.09491951763629913, 0.624729335308075, 0.5073680281639099, -0.2591286301612854, 0.24717049300670624, -0.627115786075592, 0.31062591075897217, -0.812284529209137, -0.5605707764625549, -0.6992891430854797, 0.0036607361398637295, -0.9269446730613708, -0.20877325534820557, 0.09517910331487656, 0.17922696471214294, -0.01045569684356451, 0.7036247253417969, 
-0.3116636574268341, 0.029064776375889778, 0.5927773118019104, 0.19260084629058838, -0.08469945192337036, 0.15726126730442047, -0.31347963213920593, -0.14805684983730316, -0.7966027855873108, -0.7064420580863953, 1.3821369409561157, -0.18884356319904327, 0.37025317549705505, 0.4316404163837433, 0.855100154876709, 0.3932962119579315, -0.1591959297657013, -0.43327954411506653, 0.5072409510612488, -0.1512771099805832, -0.45779815316200256, -0.4168333113193512, -0.49296292662620544, -1.209857702255249, 0.4141201674938202, -0.3060929477214813, -0.903066873550415, 0.7009729146957397, -0.3312981426715851, -0.4880724847316742, 0.18451671302318573, -0.7573987245559692, 0.8457077145576477, 0.021238435059785843, -0.30144885182380676, -0.2619994282722473, -0.8624501824378967, 0.035095177590847015, 0.14477993547916412, 0.3148709535598755, -0.21987318992614746, -0.06678631156682968, 0.7500129342079163, -0.5680296421051025, 0.7236080169677734, -0.0828535407781601, -0.13272632658481598, 0.43999311327934265, -0.44929686188697815, 0.25960254669189453, 0.032782476395368576, -0.3726455867290497, 0.5501288771629333, -0.10513277351856232, -0.5400776863098145, -0.46126407384872437, 0.7582383751869202, -1.081859827041626, -0.41781845688819885, -0.7224277853965759, -0.5597714185714722, 0.04228599742054939, 0.3567039370536804, 0.5100051164627075, 0.6383619904518127, -0.026325460523366928, 0.27789807319641113, 0.3267897963523865, -0.17737866938114166, 0.3948126435279846, 0.44435515999794006, 0.012356994673609734, -0.6673917174339294, 0.6725122332572937, 0.5983847379684448, 0.2547721862792969, 0.19114693999290466, -0.0513426773250103, -0.37290725111961365, -0.28960615396499634, -0.5808655023574829, 0.40711092948913574, -0.5466216206550598, -0.22001005709171295, -0.6745768189430237, -0.22526250779628754, -0.8578949570655823, -0.16370779275894165, 0.011388507671654224, -0.49794673919677734, -0.25730282068252563, -0.30110418796539307, 0.4851394295692444, 0.6216604113578796, -0.6220629215240479, 0.18383820354938507, -0.5773144364356995, 0.4070698022842407, -0.09975411742925644, 0.1296772062778473, -0.1830996721982956, -0.4271361231803894, -0.07005764544010162, 0.25859344005584717, -0.1341550648212433, -1.0122686624526978, 0.39760199189186096, 0.03452831134200096, 0.7518585920333862, 0.22794972360134125, 0.25694623589515686, 0.5937375426292419, -0.36574602127075195, 1.0790971517562866, 0.1454760879278183, -0.7551358938217163, 0.5693971514701843, -0.2886217534542084, 0.20355883240699768, 0.7178887128829956, 0.4667346775531769, -0.6679588556289673, -0.3443123698234558, -0.6987534165382385, -1.1953935623168945, 0.8932768106460571, 0.509063720703125, 0.18441256880760193, -0.04810655862092972, 0.3818522095680237, -0.10611769556999207, 0.24266266822814941, -0.6559398174285889, -0.7472584247589111, -0.15051747858524323, -0.2230910360813141, -0.3398226797580719, -0.5697829723358154, -0.19517096877098083, -0.2836281359195709, 0.8866217136383057, 0.2625254690647125, 0.43467649817466736, 0.4161590039730072, -0.2325899451971054, -0.07737716287374496, 0.48121148347854614, 0.793934166431427, 0.839364767074585, -0.20215965807437897, -0.15055346488952637, -0.07752572000026703, -0.6940879821777344, 0.2906650900840759, 0.38631075620651245, -0.6263628005981445, 0.2683243155479431, 0.3764247000217438, 0.9599165320396423, 0.27635419368743896, -0.26011765003204346, 0.6466488838195801, 0.10895587503910065, -0.3137991428375244, -0.45804765820503235, -0.31158584356307983, 0.21262814104557037, 0.12439052015542984, 0.35761749744415283, 
0.08919693529605865, -0.022055165842175484, -0.5875357389450073, 0.22234009206295013, 0.15740226209163666, -0.22869737446308136, -0.09856690466403961, 0.5491669178009033, 0.18215343356132507, -0.1099710687994957, 0.7262966632843018, -0.21805430948734283, -0.540343165397644, 0.6219416856765747, 0.3773340582847595, 0.7426692843437195, -0.29924526810646057, 0.5287709832191467, 0.6896831393241882, 0.627364993095398, 0.07357984781265259, 0.7766023278236389, 0.0768401101231575, -0.7671806812286377, -0.27357804775238037, -0.4439925253391266, 0.04874330386519432, 0.20670928061008453, -1.0260494947433472, 0.3019607663154602, -0.09627493470907211, -0.24573054909706116, 0.02963392063975334, 0.03235955163836479, -0.37891218066215515, -0.056967124342918396, 0.11182154715061188, 0.7529281973838806, -1.3336747884750366, 0.8533082008361816, 0.6131398677825928, -0.9088770151138306, -0.8309361338615417, -0.20043592154979706, 0.003825455205515027, -0.7133939266204834, 0.5455459952354431, 0.1468166559934616, -0.10331081598997116, -0.2774779498577118, -0.32292261719703674, -0.9709606766700745, 1.1900956630706787, 0.5632966160774231, -0.5688236355781555, 0.08971838653087616, 0.3571648597717285, 0.4680873453617096, 0.13275010883808136, 0.18040747940540314, 0.4848456084728241, 0.7350854277610779, -0.17876973748207092, -0.8708571791648865, 0.012859182432293892, -0.42201343178749084, 0.09737489372491837, 0.4087999165058136, -0.928215503692627, 0.9230442643165588, -0.1471526175737381, -0.2963751554489136, 0.059539515525102615, 0.9750165343284607, -0.08455520123243332, -0.006028393749147654, 0.3323234021663666, 0.7903540134429932, 0.592578649520874, -0.3725399971008301, 1.0160975456237793, -0.4119566082954407, 0.3903440535068512, 0.9928801655769348, -0.254533976316452, 0.7680579423904419, 0.6068863272666931, -0.46362432837486267, 0.6199596524238586, 0.44836241006851196, -0.22752077877521515, 0.6203631162643433, -0.08447035402059555, -0.12921632826328278, -0.19170847535133362, -0.1006321981549263, -0.5492732524871826, 0.5662645101547241, 0.20949053764343262, -0.22867357730865479, -0.32677680253982544, 0.14082518219947815, 0.25930914282798767, 0.4419703185558319, -0.2286083996295929, 0.4420538544654846, 0.1673891246318817, -0.42408838868141174, 0.5272237658500671, -0.1581786870956421, 0.6732791066169739, -0.9486942887306213, 0.10100888460874557, -0.1439073532819748, 0.24292154610157013, -0.43916624784469604, -1.166327953338623, 0.313532292842865, -0.09161986410617828, -0.25638189911842346, 0.015552596189081669, 0.4026889503002167, -0.5567970275878906, -0.6652306318283081, 0.7301745414733887, 0.2770017385482788, 0.048988401889801025, 0.2961708605289459, -0.8024097084999084, -0.01145867444574833, 0.11511418223381042, -0.26600420475006104, 0.30118393898010254, 0.31780028343200684, 0.008034596219658852, 0.5560577511787415, 0.45342928171157837, 0.20442317426204681, 0.10445582121610641, 0.15782396495342255, 0.8772886395454407, -0.4225306808948517, -0.5785638689994812, -0.8032535314559937, 0.3205067217350006, -0.3310681879520416, -0.5485178828239441, 1.303375005722046, 0.8490929007530212, 1.062859058380127, -0.05497574433684349, 0.9822778105735779, -0.4618334472179413, 0.37656575441360474, -0.5245968699455261, 0.8192649483680725, -0.5618511438369751, -0.26025110483169556, -0.4753558933734894, -0.9826796650886536, -0.16340087354183197, 0.7462259531021118, -0.29467853903770447, -0.06595152616500854, 0.42969971895217896, 0.9047531485557556, 0.11065056920051575, -0.015668516978621483, 0.09411073476076126, 0.355774849653244, 
-0.03605154901742935, 0.5276185274124146, 0.3761713206768036, -0.7182031273841858, 0.5843926072120667, -0.6251253485679626, -0.049368083477020264, -0.10378625243902206, -0.7698322534561157, -0.604058563709259, -0.8448318839073181, -0.30243393778800964, -0.5929494500160217, 0.14016929268836975, 0.9066109657287598, 0.2503609359264374, -1.0183202028274536, -0.38207414746284485, 0.29912689328193665, -0.18436786532402039, -0.3793550729751587, -0.18843288719654083, 0.8442978858947754, 0.07392537593841553, -0.541864275932312, 0.17753654718399048, 0.14851915836334229, -0.08896911889314651, -0.035086777061223984, -0.006577345076948404, -0.8547889590263367, 0.033613692969083786, 0.7905501127243042, 0.3896534740924835, -0.7884957790374756, -0.2684837281703949, -0.08035223931074142, -0.4278152883052826, 0.20728442072868347, 0.45799899101257324, -0.3154204189777374, 0.38855525851249695, 0.5788944959640503, 0.25653213262557983, 0.32958146929740906, -0.07543154060840607, 0.3535092771053314, -0.5018311738967896, 0.5811834931373596, -0.10581738501787186, 0.5059767365455627, 0.3884252607822418, -0.3772413730621338, 0.7996538877487183, 0.4055994153022766, -0.32692575454711914, -0.7923787236213684, 0.014908348210155964, -1.3385989665985107, -0.18272201716899872, 1.3593209981918335, -0.2399526834487915, -0.32785457372665405, -0.10987483710050583, -0.33516013622283936, 0.42696264386177063, -0.5408822894096375, 0.4638454020023346, 0.8839662671089172, 0.0906524583697319, 0.12292741239070892, -0.5097965598106384, 0.1503816395998001, 0.33707690238952637, -0.7357961535453796, 0.009161174297332764, 0.3470357060432434, 0.2629159092903137, 0.27166011929512024, 0.5960284471511841, -0.3081204295158386, 0.2457473874092102, 0.15914809703826904, 0.2639060616493225, -0.06605828553438187, 0.03479694575071335, -0.4144216477870941, -0.05959780886769295, -0.4774363338947296, -0.32469725608825684 ]
C-MTEB/MultilingualSentiment-classification
C-MTEB
2023-07-28T13:29:38Z
200
0
[ "region:us" ]
null
2023-07-28T13:29:08Z
---
configs:
- config_name: default
  data_files:
  - split: train
    path: data/train-*
  - split: validation
    path: data/validation-*
  - split: test
    path: data/test-*
dataset_info:
  features:
  - name: text
    dtype: string
  - name: label
    dtype:
      class_label:
        names:
          '0': positive
          '1': neutral
          '2': negative
  splits:
  - name: train
    num_bytes: 19594086
    num_examples: 120000
  - name: validation
    num_bytes: 483785
    num_examples: 3000
  - name: test
    num_bytes: 491522
    num_examples: 3000
  download_size: 14087126
  dataset_size: 20569393
---

# Dataset Card for "MultilingualSentiment-classification"

[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
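The YAML block above declares a single default config with train/validation/test splits and an integer `label` column whose class names are positive, neutral and negative. As an illustrative sketch (assuming the `datasets` library is installed and the repository loads as declared), the split size and label names can be checked like this:

```python
from datasets import load_dataset

# Load the test split declared in the config block above.
ds = load_dataset("C-MTEB/MultilingualSentiment-classification", split="test")

# The card metadata maps label ids to class names:
# 0 -> positive, 1 -> neutral, 2 -> negative.
id2label = {0: "positive", 1: "neutral", 2: "negative"}

print(len(ds))  # 3000 examples expected according to the split metadata
for example in ds.select(range(3)):
    print(id2label[example["label"]], example["text"][:80])
```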
[ -0.5068361163139343, -0.1046394556760788, -0.0023523513227701187, 0.5255275964736938, -0.0749870017170906, 0.22911174595355988, -0.09672864526510239, -0.4703961908817291, 0.7023376226425171, 0.12894345819950104, -0.650073230266571, -0.7162650227546692, -0.6024560332298279, -0.1764809489250183, -0.10820513963699341, 1.2400120496749878, -0.17631617188453674, 0.2791686952114105, -0.3458750247955322, -0.4206447899341583, -0.2901986241340637, -0.8545058965682983, -0.7060994505882263, -0.5520210266113281, 0.9783000946044922, 0.6044252514839172, 0.2964296340942383, 0.34437376260757446, 0.5542138814926147, 0.1929052770137787, 0.08508798480033875, -0.22405840456485748, -0.07656984031200409, -0.08745530992746353, -0.16089442372322083, -0.5307325124740601, -0.9762023091316223, 0.058782726526260376, 0.6714286804199219, 0.6604999303817749, -0.3539256155490875, 0.779900848865509, -0.13860838115215302, 0.5329826474189758, -0.49368491768836975, 0.7495642304420471, 0.038316935300827026, 0.18380998075008392, -0.7633325457572937, -0.2685436010360718, -0.08126141130924225, -0.30411481857299805, -0.27767178416252136, -0.8411800861358643, 0.2339548021554947, -0.12267962098121643, 0.7648332118988037, 0.037706609815359116, -0.3066824972629547, -0.04312707483768463, -0.57150799036026, 0.2559565007686615, -0.031663745641708374, 0.5757628083229065, 0.578836977481842, 0.6404259204864502, 0.32489779591560364, -0.5286741852760315, -0.464005708694458, 0.1196386069059372, 0.01729249581694603, 0.3388969898223877, 0.3451569676399231, -0.12065654993057251, 0.5028750896453857, 0.5454900860786438, -0.6400924324989319, -0.0037057253066450357, -0.453510046005249, -0.5139715671539307, 0.7243024706840515, 0.10371088981628418, 0.31418538093566895, -0.08216697722673416, 0.04775366932153702, -0.2149430215358734, -0.3679361343383789, -0.08265266567468643, 0.4224604666233063, 0.4085754156112671, -1.0227065086364746, 0.6397379040718079, -0.06367844343185425, 0.37867021560668945, -0.0744936391711235, 0.26213109493255615, 0.8920737504959106, -0.42475396394729614, 0.0937768742442131, 0.14634549617767334, 0.3364255428314209, 0.37670233845710754, 0.14656038582324982, 0.08837319165468216, -0.05767764896154404, 0.2850513458251953, -0.22451283037662506, -1.0536181926727295, -0.6966593861579895, 0.09464533627033234, -0.6695041656494141, -0.11858224868774414, 0.17118313908576965, -1.005046010017395, -0.38094425201416016, -0.5460829138755798, 0.0703926682472229, 0.09241271018981934, -0.8990982174873352, -0.19654512405395508, -0.5953015685081482, 0.286165326833725, 0.16474314033985138, -0.7957310676574707, 0.20777681469917297, 0.53985595703125, 0.763772189617157, 0.16669264435768127, -0.4895826578140259, -0.5451679229736328, 0.16719284653663635, 0.06724896281957626, 0.8160659670829773, -0.6481115221977234, -0.2521563172340393, 0.08557458966970444, 0.437138170003891, -0.1338213086128235, -0.13530710339546204, 0.6659890413284302, -0.39687076210975647, 0.24298694729804993, -0.46760794520378113, -0.4070777893066406, -0.023956840857863426, 0.5494464635848999, -0.9589552283287048, 1.0163052082061768, 0.2830665409564972, -0.7339151501655579, 0.6019532680511475, -1.269141435623169, -0.45435675978660583, 0.6611335873603821, 0.03020635060966015, -0.34838762879371643, -0.03852294385433197, -0.07817617058753967, 0.6186245083808899, -0.013700508512556553, 0.38749417662620544, -0.8090174794197083, 0.054615363478660583, 0.17517077922821045, -0.21787860989570618, 1.0739099979400635, 0.34033823013305664, 0.253210186958313, -0.11953451484441757, 
-1.0391789674758911, -0.26432737708091736, 0.24659614264965057, -0.3769803047180176, -0.4485189914703369, -0.27876657247543335, 0.6625128984451294, 0.09991468489170074, 0.5164650678634644, -0.5690662264823914, 0.4614886939525604, 0.2883538603782654, 0.00004640313272830099, 0.5106015801429749, -0.12133791297674179, 0.4047548770904541, -0.3792509138584137, 0.6387599110603333, 0.051333583891391754, 0.35372015833854675, -0.08227990567684174, -0.29200252890586853, -0.7337459325790405, 0.024086836725473404, 0.6238101720809937, 0.9189278483390808, -0.9086759090423584, 0.4582388997077942, -0.2186117321252823, -0.7403623461723328, -0.21222031116485596, 0.060631200671195984, 0.2913706302642822, 0.10991955548524857, 0.3890440762042999, -0.4105052649974823, -0.6821790337562561, -0.4754859209060669, 0.29828208684921265, -0.23043900728225708, 0.22265413403511047, 0.2694382965564728, 0.7796484231948853, -0.6410977840423584, 0.38762399554252625, -0.5685001611709595, -0.2517794668674469, 0.11972397565841675, -0.044666074216365814, 0.08404391258955002, 0.7125077843666077, 1.0485471487045288, -0.856856107711792, -0.4697556793689728, -0.2086755633354187, -0.7195890545845032, -0.11097007989883423, 0.2578473687171936, -0.4211116135120392, 0.14758771657943726, 0.2995527982711792, -0.27169764041900635, 0.4203678369522095, 0.8725433349609375, -0.43424785137176514, 0.20271535217761993, 0.05254456400871277, 0.2397928684949875, -1.4901535511016846, 0.2262527048587799, 0.03379187732934952, -0.08745172619819641, -0.37618255615234375, 0.14189180731773376, 0.2805785536766052, -0.32551780343055725, -0.11233016848564148, 0.7440210580825806, -0.45282259583473206, 0.0005362847587093711, 0.0991426482796669, -0.08963000029325485, -0.17954108119010925, 0.2480369508266449, 0.4075568616390228, 0.4670385718345642, 1.1030082702636719, -0.7066430449485779, 1.023063063621521, 0.4584667384624481, 0.011903745122253895, 1.1612675189971924, -0.5008848905563354, 0.04136134311556816, -0.2288028448820114, 0.2784956395626068, -0.668034017086029, -0.7362889647483826, 0.5199028849601746, -0.40729618072509766, 0.6543522477149963, -0.6053928136825562, -0.7563585638999939, -0.629019021987915, -0.28810372948646545, 0.9130898118019104, 0.14938321709632874, -0.9320774078369141, 0.42700010538101196, 0.7792314887046814, -0.2309083342552185, -0.20590336620807648, -1.05487859249115, 0.09012962877750397, -0.31025663018226624, -0.1469324827194214, 0.36103585362434387, -0.5148324966430664, 0.16383212804794312, -0.15526843070983887, 0.5846309661865234, -0.23413532972335815, -0.11019054800271988, 0.2557516098022461, 0.13408511877059937, -0.15309295058250427, 0.7694266438484192, 0.09851367771625519, -0.6399122476577759, 0.0015274295583367348, -0.06050936505198479, 0.4787469804286957, -0.10783418267965317, -0.3177764117717743, -0.3216473460197449, 0.510770857334137, 0.36847439408302307, -0.4312695860862732, 0.5328031182289124, 1.3304929733276367, -0.6701397895812988, 0.09823965281248093, -0.4632820785045624, 0.04905960336327553, -0.4003521800041199, 0.14961078763008118, -0.34275999665260315, -0.6522419452667236, 0.5902193784713745, -0.044097140431404114, -0.021548235788941383, 0.6592543125152588, 0.7106686234474182, 0.0004908469854854047, 0.7178752422332764, 0.6224315166473389, -0.3419712483882904, 0.5934001207351685, -0.20993883907794952, -0.3551480174064636, -0.6084681749343872, -0.553366482257843, -0.6854457855224609, -0.4007989466190338, -0.9164687395095825, -0.23406551778316498, 0.032639093697071075, -0.10691838711500168, -0.17366424202919006, 
0.5465962290763855, -0.5932711362838745, 0.2849029004573822, 0.5691859126091003, -0.09020845592021942, -0.06351476907730103, -0.02810080535709858, 0.1920856237411499, 0.1477804034948349, -0.7996752262115479, -0.19240739941596985, 1.12921142578125, 0.5371973514556885, 1.0156320333480835, 0.508520781993866, 0.8364394307136536, 0.07431256026029587, 0.5074306726455688, -0.4070951044559479, 0.28160393238067627, -0.10252081602811813, -0.7342031598091125, 0.06980065256357193, -0.3506180942058563, -0.9203069806098938, -0.3608466386795044, -0.38549306988716125, -0.26514652371406555, 0.3947615921497345, 0.40402692556381226, -0.0034679302480071783, 0.25350135564804077, -0.921406090259552, 1.0401980876922607, -0.38223129510879517, 0.09294997155666351, -0.17433230578899384, -0.6360978484153748, 0.0573701374232769, 0.0698518455028534, 0.24311022460460663, -0.3589424192905426, -0.10597708821296692, 1.0853501558303833, -0.15646810829639435, 1.2451956272125244, -0.8061630129814148, 0.062462218105793, 0.031228337436914444, -0.25099506974220276, -0.07855360954999924, 0.5159100294113159, 0.08257216960191727, 0.3639385402202606, 0.3737151622772217, -0.5154804587364197, -0.22221362590789795, 0.6863967180252075, -0.6943706274032593, 0.3369823694229126, -0.5953562259674072, -0.4361189007759094, 0.018463483080267906, 0.2579086422920227, 0.2548693120479584, 0.7990784645080566, -0.4612830579280853, -0.06075615808367729, 0.6310974359512329, 0.07958734035491943, 0.46973636746406555, 0.4895981550216675, -0.37243810296058655, -0.39022350311279297, 1.157839059829712, 0.3265887200832367, -0.4062008857727051, 0.5902461409568787, 0.327741801738739, -0.4311089515686035, -0.25081944465637207, -0.6725947260856628, 0.3225037157535553, -0.5585286021232605, -0.455484002828598, -0.31765690445899963, -0.37899646162986755, -0.5567747950553894, 0.14582473039627075, -0.17044563591480255, -0.7493866086006165, -0.6350830793380737, -0.5114951133728027, 0.9860484004020691, 0.5681588053703308, -0.5953741073608398, 0.4488849341869354, -1.091621994972229, 0.45573148131370544, 0.18499645590782166, 1.1183748245239258, -0.2720929682254791, -0.38571643829345703, -0.5113512277603149, -0.04672906547784805, 0.03428018093109131, -0.617965579032898, -0.11062375456094742, 0.4902994632720947, 0.7736397981643677, 0.3525645434856415, -0.1290713995695114, 0.6200428605079651, -0.27670371532440186, 0.6048418879508972, 0.19240033626556396, -0.6127513647079468, 0.6431125402450562, -0.11306127905845642, 0.5707769989967346, 0.7403804659843445, 0.7179084420204163, -0.8411198258399963, 0.14621102809906006, -0.6625434160232544, -0.5837940573692322, 0.5439070463180542, -0.24903500080108643, 0.37793174386024475, 0.04229360073804855, 0.39922308921813965, 0.12746892869472504, 0.22802120447158813, -0.9867086410522461, -0.7297871708869934, -0.285194456577301, -0.40897732973098755, -0.16770058870315552, -0.5469920039176941, -0.1327883005142212, -0.6092073917388916, 0.6883354783058167, -0.17152149975299835, 0.37761494517326355, -0.196155846118927, -0.022243434563279152, 0.04568009078502655, 0.07175860553979874, 0.550331711769104, 0.42738065123558044, -0.17695221304893494, 0.05529073625802994, 0.012138813734054565, -0.48870372772216797, -0.10804009437561035, 0.720233678817749, 0.05645660683512688, -0.08759681135416031, 0.6177115440368652, 0.7034063339233398, -0.4074341952800751, -0.06619666516780853, 0.30353087186813354, -0.3284909725189209, -0.18770980834960938, -0.7745770812034607, -0.07729692757129669, 0.12162185460329056, 0.13985982537269592, -0.04564215615391731, 
-0.08082032948732376, 0.1648356169462204, -0.3180202841758728, 0.28258004784584045, -0.021626755595207214, -0.5444439053535461, -0.5960643887519836, 0.4140154719352722, 0.7111473679542542, -0.400287389755249, 0.7065935134887695, -0.4114874601364136, -0.6208736896514893, 0.3994879126548767, 0.2562405467033386, 0.6851133704185486, -0.8520494103431702, 0.39151015877723694, 0.661750316619873, 0.18986251950263977, 0.10842028260231018, 0.7156071662902832, -0.45689502358436584, -0.8422797918319702, -0.23990632593631744, -0.45406317710876465, -0.15668587386608124, -0.20649461448192596, -1.0224019289016724, 0.15960754454135895, -0.41684383153915405, -0.2848033607006073, 0.0789218619465828, 0.023087337613105774, -0.8320509791374207, -0.0016296011162921786, 0.2806587219238281, 1.1795531511306763, -1.119840145111084, 0.9031532406806946, 0.9850288033485413, -0.4487059414386749, -0.3462708294391632, -0.2934713363647461, 0.1369098424911499, -0.6080878973007202, 0.1847349852323532, 0.29860934615135193, 0.19633935391902924, -0.39511802792549133, -0.6601598262786865, -0.6597540974617004, 1.0211738348007202, 0.19641509652137756, -0.7251970767974854, 0.22910889983177185, -0.1331150382757187, 0.5015206933021545, -0.242839515209198, 0.0429704487323761, 0.49006301164627075, 0.8277945518493652, -0.05897029861807823, -0.9814027547836304, -0.3172905445098877, -0.8204951286315918, -0.1984286606311798, 0.5034973621368408, -0.7071362733840942, 0.26424625515937805, -0.01760944165289402, -0.0949135571718216, -0.15535958111286163, 0.6352252960205078, -0.040644291788339615, 0.5234088897705078, 0.44267258048057556, 0.7310783267021179, 1.081153154373169, -0.38398873805999756, 0.9707262516021729, -0.1509290486574173, 0.43292516469955444, 1.2131133079528809, -0.1384759098291397, 0.3896847665309906, 0.3817957639694214, -0.17647400498390198, 0.3159325420856476, 0.8593558669090271, -0.5001977682113647, 0.2728567123413086, 0.10363389551639557, -0.2768471837043762, -0.29459044337272644, -0.29845452308654785, -0.8061733245849609, 0.3246314525604248, 0.6177308559417725, -0.1705865114927292, -0.02969472110271454, 0.42920437455177307, 0.11202552914619446, -0.14312495291233063, -0.5130195021629333, 0.6873253583908081, -0.0446118488907814, -0.1493302583694458, -0.034120261669158936, -0.039896074682474136, 0.5624299049377441, -0.6570324301719666, -0.2540573179721832, 0.05556865781545639, 0.26386353373527527, -0.49563804268836975, -1.1888524293899536, 0.709693431854248, -0.28487348556518555, -0.36237937211990356, -0.12006580829620361, 0.4837912917137146, -0.8069934844970703, -0.9772497415542603, 0.5693750381469727, 0.21943101286888123, 0.17418509721755981, 0.2807808518409729, -1.2563743591308594, 0.09351060539484024, -0.34962305426597595, 0.06390886753797531, 0.04175382852554321, 0.26144641637802124, -0.1330757588148117, 0.7292934656143188, 0.4640209972858429, 0.05055834725499153, -0.18917639553546906, 0.6947879791259766, 0.6795168519020081, -0.5176700353622437, -0.5357019305229187, -0.5160999894142151, 0.3970358669757843, -0.2996010482311249, -0.43590593338012695, 0.7134791612625122, 1.0024245977401733, 1.2495173215866089, -0.16828720271587372, 0.7226417064666748, -0.3931894302368164, 0.6510763168334961, -0.41426241397857666, 0.7158688902854919, -0.53614342212677, -0.3204834759235382, -0.16903913021087646, -0.804854154586792, -0.938472330570221, 0.49212193489074707, 0.24551109969615936, 0.14607496559619904, 0.5136851668357849, 0.7625172138214111, -0.21876147389411926, -0.10079383105039597, -0.12645891308784485, 0.01490582711994648, 
0.2394888997077942, 0.46691808104515076, 0.6826244592666626, -0.2694450616836548, 0.28661414980888367, -0.3343590497970581, -0.501114010810852, 0.053840041160583496, -1.0406196117401123, -1.0044001340866089, -0.9693321585655212, -0.831518292427063, -0.341192364692688, -0.31648164987564087, 1.0363633632659912, 0.9906939268112183, -1.114663004875183, -0.4164522886276245, 0.3312602937221527, 0.23056162893772125, -0.042912762612104416, -0.1461486518383026, 0.5112848281860352, 0.11159217357635498, -0.7266185283660889, -0.36699822545051575, -0.0023953125346451998, 0.5035709142684937, -0.1668359488248825, -0.08795706182718277, -0.13558369874954224, -0.46187832951545715, 0.6507284045219421, 0.4677978754043579, -0.1132236123085022, -0.1638808697462082, -0.49987727403640747, 0.05675984174013138, 0.014726836234331131, 0.9168938398361206, -0.23056042194366455, 0.37235894799232483, 0.5005855560302734, 0.4432373344898224, 0.6631970405578613, -0.2329954355955124, 0.39031970500946045, -0.8693140149116516, 0.40803661942481995, -0.1166350319981575, 0.6035398244857788, 0.3932320773601532, -0.41944050788879395, 0.7402992844581604, 0.558697521686554, -0.3850491940975189, -0.4939258396625519, 0.25317302346229553, -1.3215888738632202, 0.24362987279891968, 0.9507456421852112, 0.12355047464370728, -0.3130892813205719, -0.22934186458587646, -0.3827279806137085, 0.3225111663341522, -0.6996622085571289, 0.2602112293243408, 0.7638835310935974, 0.20910748839378357, -0.2790302038192749, -0.16306008398532867, 0.5206596851348877, -0.4956296384334564, -1.2257434129714966, 0.1862354427576065, 0.5080745816230774, 0.15126048028469086, 0.4953078031539917, 0.787708580493927, -0.29326704144477844, 0.3230654299259186, 0.11304719746112823, 0.511702299118042, -0.23747245967388153, -0.7765547633171082, -0.4353874623775482, -0.06057138741016388, -0.0119644645601511, -0.4525827169418335 ]
open-llm-leaderboard/details_upstage__Llama-2-70b-instruct
open-llm-leaderboard
2023-10-17T12:48:37Z
200
0
[ "region:us" ]
null
2023-08-17T23:49:28Z
--- pretty_name: Evaluation run of upstage/Llama-2-70b-instruct dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [upstage/Llama-2-70b-instruct](https://huggingface.co/upstage/Llama-2-70b-instruct)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_upstage__Llama-2-70b-instruct\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-17T12:48:24.237609](https://huggingface.co/datasets/open-llm-leaderboard/details_upstage__Llama-2-70b-instruct/blob/main/results_2023-10-17T12-48-24.237609.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.49989513422818793,\n\ \ \"em_stderr\": 0.005120467878578845,\n \"f1\": 0.5841736577181234,\n\ \ \"f1_stderr\": 0.004671177225967014,\n \"acc\": 0.5754715400500128,\n\ \ \"acc_stderr\": 0.011730426388075654\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.49989513422818793,\n \"em_stderr\": 0.005120467878578845,\n\ \ \"f1\": 0.5841736577181234,\n \"f1_stderr\": 0.004671177225967014\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.32221379833206976,\n \ \ \"acc_stderr\": 0.01287243548118878\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.8287292817679558,\n \"acc_stderr\": 0.010588417294962526\n\ \ }\n}\n```" repo_url: https://huggingface.co/upstage/Llama-2-70b-instruct leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|arc:challenge|25_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T16:38:35.808290.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_17T12_48_24.237609 path: - '**/details_harness|drop|3_2023-10-17T12-48-24.237609.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-17T12-48-24.237609.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_17T12_48_24.237609 path: - '**/details_harness|gsm8k|5_2023-10-17T12-48-24.237609.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-17T12-48-24.237609.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hellaswag|10_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:38:35.808290.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:38:35.808290.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:38:35.808290.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:38:35.808290.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T16_38_35.808290 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:38:35.808290.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:38:35.808290.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T16_38_35.808290 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T16:38:35.808290.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T16:38:35.808290.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_17T12_48_24.237609 path: - '**/details_harness|winogrande|5_2023-10-17T12-48-24.237609.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-17T12-48-24.237609.parquet' - config_name: results data_files: - split: 2023_07_31T16_38_35.808290 path: - results_2023-07-31T16:38:35.808290.parquet - split: 2023_10_17T12_48_24.237609 path: - results_2023-10-17T12-48-24.237609.parquet - split: latest path: - results_2023-10-17T12-48-24.237609.parquet --- # Dataset Card for Evaluation run of upstage/Llama-2-70b-instruct ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/upstage/Llama-2-70b-instruct - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [upstage/Llama-2-70b-instruct](https://huggingface.co/upstage/Llama-2-70b-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_upstage__Llama-2-70b-instruct", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-17T12:48:24.237609](https://huggingface.co/datasets/open-llm-leaderboard/details_upstage__Llama-2-70b-instruct/blob/main/results_2023-10-17T12-48-24.237609.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.49989513422818793, "em_stderr": 0.005120467878578845, "f1": 0.5841736577181234, "f1_stderr": 0.004671177225967014, "acc": 0.5754715400500128, "acc_stderr": 0.011730426388075654 }, "harness|drop|3": { "em": 0.49989513422818793, "em_stderr": 0.005120467878578845, "f1": 0.5841736577181234, "f1_stderr": 0.004671177225967014 }, "harness|gsm8k|5": { "acc": 0.32221379833206976, "acc_stderr": 0.01287243548118878 }, "harness|winogrande|5": { "acc": 0.8287292817679558, "acc_stderr": 0.010588417294962526 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
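For completeness, here is a minimal sketch of pulling the aggregated `results` configuration declared in the YAML front matter above. It assumes only that the `datasets` library is installed and that the config and split names are accepted verbatim by `load_dataset`:

```python
from datasets import load_dataset

# "latest" always mirrors the most recent timestamped run
# (2023_10_17T12_48_24.237609 for this card, per the YAML above).
latest_results = load_dataset(
    "open-llm-leaderboard/details_upstage__Llama-2-70b-instruct",
    "results",
    split="latest",
)

# Inspect which aggregated metric columns are available.
print(latest_results.column_names)

# To pin a specific run instead of the latest one, pass its timestamped split
# name from the YAML above, e.g. split="2023_10_17T12_48_24.237609".
```

The same pattern applies to any per-task configuration listed above, such as `harness_winogrande_5` or `harness_truthfulqa_mc_0`.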
[ -0.3484547734260559, -0.5411258935928345, 0.278513103723526, 0.2864886522293091, -0.22064058482646942, 0.2640617787837982, -0.3591436445713043, -0.14369437098503113, 0.4603451192378998, 0.5826770067214966, -0.7486018538475037, -0.8663275837898254, -0.7057149410247803, 0.20391903817653656, -0.3045719861984253, 1.1568598747253418, -0.2396036684513092, -0.2433132529258728, -0.016390500590205193, -0.18664850294589996, -0.40166574716567993, -0.468401163816452, -0.4870937168598175, -0.5057970881462097, 0.22413867712020874, 0.6772934794425964, 0.39620882272720337, 0.7164137363433838, 0.5953435301780701, 0.38574865460395813, -0.2679932117462158, 0.2216910868883133, -0.3837222456932068, -0.15284286439418793, 0.2771950960159302, -0.6491854190826416, -0.7184006571769714, 0.032574478536844254, 0.7316438555717468, 0.41639944911003113, -0.2928326725959778, 0.6827502846717834, 0.12756502628326416, 0.6350216269493103, -0.3753404915332794, 0.311347633600235, -0.42506590485572815, -0.040102940052747726, -0.45307567715644836, -0.09257703274488449, 0.07163981348276138, -0.2008398175239563, -0.28363004326820374, -0.6127533912658691, 0.11597054451704025, 0.06267278641462326, 1.084822416305542, 0.23333683609962463, -0.16036038100719452, -0.20976579189300537, -0.2969103753566742, 0.8784857988357544, -0.8167408108711243, -0.11401057243347168, 0.6753302216529846, 0.07877818495035172, -0.3145565688610077, -0.687082827091217, -0.5076441168785095, -0.11724698543548584, -0.23375260829925537, 0.15174420177936554, -0.09121063351631165, -0.12444566935300827, 0.3962586224079132, 0.6886605024337769, -0.5172704458236694, 0.057358115911483765, -0.518556535243988, -0.14935290813446045, 1.0091047286987305, 0.3740006983280182, 0.07177584618330002, -0.569438636302948, -0.4254353940486908, -0.39496785402297974, -0.5467637777328491, 0.2334509640932083, 0.4144814610481262, 0.3292243778705597, -0.6194960474967957, 0.8318593502044678, -0.469679594039917, 0.4844415485858917, -0.04545688256621361, -0.1605696976184845, 0.7379100918769836, -0.6761540770530701, -0.3136330246925354, -0.013974340632557869, 0.9288738965988159, 0.44477441906929016, 0.07435719668865204, 0.25850018858909607, -0.20997443795204163, 0.022038094699382782, 0.08573343604803085, -0.681399941444397, -0.11345590651035309, 0.416655570268631, -0.4834756851196289, -0.5047950744628906, 0.16631779074668884, -0.9745261669158936, -0.23168116807937622, -0.32807815074920654, 0.19320517778396606, -0.02330363541841507, -0.2756196856498718, -0.08768172562122345, -0.13568024337291718, 0.21213944256305695, 0.15681861340999603, -0.542904257774353, 0.32148584723472595, 0.6293253302574158, 0.9784762263298035, -0.05773437023162842, -0.3569430112838745, -0.2570505142211914, -0.11784134060144424, -0.1398545503616333, 0.5056933164596558, -0.21989278495311737, -0.46374693512916565, -0.23520991206169128, 0.2947508692741394, -0.22950460016727448, -0.6044607162475586, 0.696588933467865, -0.26820844411849976, 0.1944550722837448, -0.3161037862300873, -0.41921478509902954, -0.23562918603420258, 0.30531933903694153, -0.5839499235153198, 1.5433101654052734, 0.35506197810173035, -0.8347401022911072, 0.08265716582536697, -0.8608732223510742, -0.2175290882587433, 0.028131045401096344, -0.11000609397888184, -0.5828281044960022, -0.17597265541553497, 0.13250023126602173, 0.5542396903038025, -0.46807438135147095, 0.140104740858078, -0.23953714966773987, -0.42824357748031616, 0.14839601516723633, -0.08866101503372192, 1.1256262063980103, 0.15556930005550385, -0.5723779797554016, 0.1444564312696457, 
-1.1334228515625, 0.0852656438946724, 0.42153793573379517, -0.6315498352050781, -0.19472861289978027, -0.27971577644348145, 0.09845104068517685, 0.12785623967647552, 0.482614129781723, -0.6069090962409973, 0.38886016607284546, -0.12382840365171432, 0.3705245852470398, 1.0378798246383667, -0.003637404181063175, 0.22617042064666748, -0.424683153629303, 0.5188561677932739, -0.054519906640052795, 0.31369879841804504, 0.1943649798631668, -0.6061088442802429, -0.8705509901046753, -0.22559425234794617, 0.1168685257434845, 0.7282527685165405, -0.41527387499809265, 0.7442378997802734, -0.3673698902130127, -0.6848974823951721, -0.7369141578674316, 0.20949919521808624, 0.528918981552124, 0.5537094473838806, 0.3385797142982483, -0.3389401137828827, -0.8202391862869263, -0.9629818797111511, 0.13190695643424988, -0.22054073214530945, -0.09420221298933029, 0.3973054587841034, 0.9939219355583191, -0.4331786334514618, 0.6345263123512268, -0.7138200998306274, -0.41372159123420715, -0.29041633009910583, -0.023483941331505775, 0.7797858119010925, 0.44523486495018005, 0.4548323154449463, -0.6549793481826782, -0.27771440148353577, -0.057902008295059204, -0.8019763231277466, -0.2356042116880417, -0.10106299072504044, -0.2895587384700775, 0.30030694603919983, -0.08128073066473007, -0.5558239221572876, 0.5608877539634705, 0.5644057393074036, -0.5647534132003784, 0.6817042827606201, -0.14070096611976624, 0.44932615756988525, -1.269134283065796, 0.17808398604393005, -0.0329357348382473, 0.08438772708177567, -0.47919487953186035, -0.13699841499328613, 0.00019959104247391224, 0.29557907581329346, -0.34587979316711426, 0.5321608781814575, -0.3876883387565613, -0.2867634892463684, 0.029166948050260544, 0.08145196735858917, -0.01381687168031931, 0.5479249954223633, -0.3429515063762665, 0.7856954336166382, 0.4597240686416626, -0.3869178295135498, 0.4206993579864502, 0.5589236617088318, -0.6190066337585449, 0.308759480714798, -0.6008305549621582, 0.09751772880554199, 0.19768217206001282, 0.23971523344516754, -1.0085333585739136, -0.5097147226333618, 0.4523519277572632, -0.51801997423172, 0.25493279099464417, -0.19354753196239471, -0.586925208568573, -0.545106828212738, -0.5498504042625427, 0.1391422301530838, 0.5030592679977417, -0.49984240531921387, 0.2577025592327118, 0.5104565024375916, 0.040311962366104126, -0.7581350803375244, -0.8019556999206543, -0.07139547169208527, -0.4236994981765747, -0.7065975069999695, 0.42266178131103516, -0.20629559457302094, -0.33896878361701965, -0.07242880761623383, -0.0058022793382406235, -0.025890473276376724, 0.21191862225532532, 0.3688138425350189, 0.5303484797477722, 0.030888987705111504, -0.33799049258232117, -0.10922949016094208, -0.11514144390821457, 0.11837302148342133, 0.19983012974262238, 0.5875292420387268, -0.22945992648601532, -0.2908530831336975, -0.28088656067848206, 0.17878805100917816, 0.48967018723487854, -0.044060904532670975, 0.710675060749054, 0.5648281574249268, -0.2373466193675995, 0.025011830031871796, -0.3626609444618225, 0.07606323063373566, -0.48588353395462036, 0.3730626106262207, -0.27027198672294617, -0.7374244928359985, 0.8816512823104858, 0.21718081831932068, 0.15226013958454132, 0.6875108480453491, 0.6533124446868896, 0.005203759763389826, 0.7729543447494507, 0.23887766897678375, -0.14876526594161987, 0.4565362334251404, -0.797566294670105, -0.2360515147447586, -1.1690673828125, -0.443840354681015, -0.45242488384246826, -0.3824467062950134, -0.762115478515625, -0.4099825620651245, 0.31101706624031067, 0.21222351491451263, -0.5091078877449036, 
0.5407540202140808, -0.6713356971740723, 0.20917758345603943, 0.6975100636482239, 0.24683932960033417, 0.18457040190696716, 0.0227626021951437, -0.03970290720462799, 0.29431048035621643, -0.4737485647201538, -0.39220476150512695, 1.417252779006958, 0.2168959528207779, 0.5947824716567993, -0.05237843468785286, 0.8845838308334351, 0.4493064284324646, 0.2951906621456146, -0.4320535957813263, 0.6714308261871338, -0.09186940640211105, -0.5986242294311523, -0.17343106865882874, -0.48614615201950073, -0.8931846618652344, 0.22271880507469177, 0.02422950230538845, -0.9894089698791504, 0.11291764676570892, 0.06195151433348656, -0.10052508860826492, 0.3837163746356964, -0.49420642852783203, 0.6750873327255249, -0.29314160346984863, -0.300307035446167, -0.028334196656942368, -0.9109130501747131, 0.45170098543167114, -0.03823702782392502, 0.3929767310619354, -0.2718752920627594, 0.003311527194455266, 1.1643158197402954, -0.6463868021965027, 0.7974256277084351, -0.13654260337352753, 0.0768665075302124, 0.39538809657096863, -0.33901622891426086, 0.6927277445793152, -0.04993617907166481, -0.23379677534103394, 0.5542206168174744, -0.2600979208946228, -0.2638952136039734, -0.3031618595123291, 0.8228936195373535, -0.9223014712333679, -0.41104888916015625, -0.370334267616272, -0.5565536022186279, 0.27962151169776917, 0.14224833250045776, 0.4737943708896637, 0.3070776164531708, 0.18772533535957336, 0.2224399745464325, 0.24855439364910126, -0.13314023613929749, 0.5183964371681213, 0.3847557008266449, -0.1073295846581459, -0.7495418787002563, 0.5495765805244446, 0.2752324938774109, 0.13666753470897675, 0.19079777598381042, 0.0454549640417099, -0.6008058190345764, -0.3994999825954437, -0.6089463233947754, 0.290903240442276, -0.6669583916664124, -0.4015938341617584, -0.40977099537849426, -0.17000606656074524, -0.31184178590774536, -0.051460206508636475, -0.4992066025733948, -0.5204684734344482, -0.4921916723251343, -0.258147269487381, 0.6986182332038879, 0.6650481820106506, -0.4659223258495331, 0.3621695041656494, -0.6999022364616394, 0.17929352819919586, -0.16288769245147705, 0.3545612096786499, -0.059781014919281006, -0.7013709545135498, -0.3421350121498108, 0.20058615505695343, -0.3879588544368744, -0.9362788796424866, 0.5148798227310181, 0.004830490797758102, 0.6759536266326904, 0.10242923349142075, 0.09575177729129791, 0.7814273834228516, -0.11565549671649933, 1.0031918287277222, 0.032685838639736176, -0.7902963757514954, 0.8036434054374695, -0.22877688705921173, 0.0242435522377491, 0.524642825126648, 0.21140095591545105, -0.46528175473213196, -0.19829954206943512, -0.9054294228553772, -1.2038986682891846, 1.0306209325790405, 0.6316651701927185, -0.32335206866264343, 0.18433435261249542, 0.2873431444168091, -0.11677044630050659, 0.24152715504169464, -0.6481033563613892, -0.8293622732162476, -0.022937284782528877, -0.22973696887493134, -0.13671182096004486, -0.13524897396564484, -0.494712769985199, -0.46770939230918884, 0.8959981799125671, 0.09877865761518478, 0.48118308186531067, 0.1961814910173416, -0.038208089768886566, -0.13483452796936035, 0.29978519678115845, 0.4997875392436981, 0.7228817343711853, -0.41647300124168396, -0.0850766971707344, 0.37485384941101074, -0.6093336939811707, 0.22299863398075104, 0.347215861082077, -0.11514695733785629, -0.14229920506477356, 0.49970343708992004, 0.9284751415252686, 0.177976593375206, -0.37382304668426514, 0.4599834084510803, 0.08736830949783325, -0.2250458151102066, -0.5561994910240173, 0.09635190665721893, -0.050094425678253174, 0.4301980137825012, 
0.4707861542701721, -0.1210160106420517, -0.018689490854740143, -0.3012206554412842, 0.32006070017814636, 0.19429317116737366, -0.06822933256626129, -0.32407045364379883, 0.6738240718841553, -0.0021494412794709206, -0.3781373202800751, 0.7738233208656311, -0.12003722786903381, -0.6269385814666748, 1.159690022468567, 0.3551415503025055, 0.7858946919441223, -0.07124467194080353, 0.1628255993127823, 0.47092047333717346, 0.3148735463619232, -0.17247390747070312, 0.5917885303497314, 0.01757427304983139, -0.6030077934265137, -0.35324493050575256, -0.8608931303024292, -0.2444506734609604, 0.3991180658340454, -1.033033013343811, 0.3287041187286377, -0.026247268542647362, -0.21288615465164185, -0.16847307980060577, 0.4296668767929077, -0.8488790988922119, 0.1602846086025238, 0.04911062493920326, 0.8481397032737732, -1.0226261615753174, 0.6493731141090393, 0.9171915054321289, -0.5001516938209534, -0.9473757147789001, -0.3543320298194885, 0.1765207052230835, -1.0071053504943848, 0.49083906412124634, 0.24363934993743896, 0.32491764426231384, -0.06925053894519806, -0.6153437495231628, -1.1150975227355957, 1.6094049215316772, 0.15415780246257782, -0.5852726101875305, 0.18813073635101318, 0.2215653359889984, 0.39369744062423706, -0.32717278599739075, 0.5612511038780212, 0.7710356712341309, 0.7332766652107239, -0.022041000425815582, -0.9003208875656128, 0.37394168972969055, -0.4763949513435364, -0.005553111899644136, 0.2622109055519104, -0.9501398801803589, 0.9811038374900818, -0.28455665707588196, -0.1089528277516365, 0.05318167433142662, 0.4419327676296234, 0.706131100654602, 0.42939382791519165, 0.5073010325431824, 0.7550837397575378, 0.7381953597068787, -0.3318117558956146, 1.1774779558181763, -0.22841162979602814, 0.8449716567993164, 1.0417560338974, 0.06233815848827362, 0.6905162334442139, 0.3225938081741333, -0.6066964268684387, 0.5633078217506409, 0.938568115234375, -0.35957634449005127, 0.42794182896614075, 0.24007314443588257, -0.002426889492198825, -0.025146376341581345, -0.04074233025312424, -0.5351434350013733, 0.5081461071968079, 0.18280765414237976, -0.5313381552696228, -0.10931478440761566, -0.313527911901474, 0.2403721660375595, -0.33975183963775635, -0.30602267384529114, 0.5195040702819824, -0.031369712203741074, -0.4958532452583313, 0.7216231226921082, -0.17055389285087585, 0.766007661819458, -0.6679064631462097, -0.11284934729337692, -0.3685772716999054, 0.2837024927139282, -0.46011579036712646, -1.0972708463668823, 0.17164458334445953, 0.1472104787826538, -0.25781330466270447, -0.12560079991817474, 0.6612415909767151, -0.2573772668838501, -0.6171063780784607, 0.5139114260673523, 0.2867877781391144, 0.37767890095710754, 0.05373287573456764, -0.9238053560256958, 0.21492862701416016, 0.36152195930480957, -0.8387740254402161, 0.45678481459617615, 0.17354556918144226, 0.07011421769857407, 0.5878992080688477, 0.754899799823761, 0.08913881331682205, 0.07324081659317017, -0.02911352552473545, 1.1499990224838257, -0.7584241032600403, -0.3604143559932709, -0.79786217212677, 0.9599174857139587, -0.3380756676197052, -0.6663258671760559, 0.8010568022727966, 0.8060512542724609, 0.9155016541481018, 0.09415847063064575, 0.7635138630867004, -0.4856247007846832, 0.46678468585014343, -0.3259481191635132, 0.8097171187400818, -0.8422102928161621, 0.3794468343257904, -0.17461353540420532, -0.826521635055542, 0.014790319837629795, 0.551091730594635, -0.05649065971374512, -0.047684479504823685, 0.4011518657207489, 0.9871801733970642, 0.11263250559568405, 0.062258969992399216, -0.06932458281517029, 
0.3620923161506653, 0.2434149980545044, 0.5860493183135986, 0.7708434462547302, -0.6612982749938965, 0.5141209363937378, -0.7533962726593018, -0.4915730953216553, -0.15387147665023804, -0.7151704430580139, -0.6673481464385986, -0.5795841217041016, -0.21468697488307953, -0.5165007710456848, -0.03125350922346115, 0.9851099848747253, 0.4483082890510559, -0.8287861943244934, -0.4603500962257385, -0.04968496412038803, 0.15783870220184326, -0.17856942117214203, -0.34515804052352905, 0.6322393417358398, 0.024632837623357773, -0.7546500563621521, 0.3456636965274811, -0.09880571812391281, -0.12930427491664886, 0.030856968834996223, -0.20247642695903778, -0.38112738728523254, -0.26373738050460815, 0.47980383038520813, 0.19135265052318573, -0.6923007369041443, -0.36051321029663086, -0.10701427608728409, 0.09133440256118774, 0.2671683728694916, 0.31764358282089233, -0.4741455912590027, 0.07637856155633926, 0.5595455765724182, 0.24167542159557343, 0.656051754951477, 0.08452025055885315, 0.23261642456054688, -0.8055739998817444, -0.04179547727108002, -0.12947693467140198, 0.5149492025375366, 0.2576589286327362, -0.5582287907600403, 1.0895344018936157, 0.3787028193473816, -0.817733108997345, -0.9711701273918152, -0.18060284852981567, -1.1975760459899902, 0.005663753021508455, 1.4879050254821777, -0.2632855474948883, -0.28755712509155273, 0.08468041568994522, -0.16048036515712738, 0.4754485785961151, -0.6500653624534607, 0.684005618095398, 0.6498031616210938, -0.39274951815605164, 0.15862169861793518, -0.5409650206565857, 0.3397747874259949, 0.138536274433136, -1.0402092933654785, 0.02136956714093685, 0.28125444054603577, 0.36686989665031433, 0.25323250889778137, 0.6845012903213501, 0.02683778665959835, -0.1738344132900238, 0.004147315863519907, 0.19038452208042145, -0.26320135593414307, -0.10859382152557373, -0.23952634632587433, 0.01935654692351818, -0.44671326875686646, -0.5466457605361938 ]
open-llm-leaderboard/details_upstage__llama-65b-instruct
open-llm-leaderboard
2023-10-24T19:27:41Z
200
0
[ "region:us" ]
null
2023-08-17T23:49:37Z
--- pretty_name: Evaluation run of upstage/llama-65b-instruct dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [upstage/llama-65b-instruct](https://huggingface.co/upstage/llama-65b-instruct)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_upstage__llama-65b-instruct\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-24T19:27:31.642045](https://huggingface.co/datasets/open-llm-leaderboard/details_upstage__llama-65b-instruct/blob/main/results_2023-10-24T19-27-31.642045.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.454383389261745,\n\ \ \"em_stderr\": 0.005099113352549085,\n \"f1\": 0.5468970218120836,\n\ \ \"f1_stderr\": 0.004699295426287538,\n \"acc\": 0.5364480517576576,\n\ \ \"acc_stderr\": 0.011564851426457474\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.454383389261745,\n \"em_stderr\": 0.005099113352549085,\n\ \ \"f1\": 0.5468970218120836,\n \"f1_stderr\": 0.004699295426287538\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2623199393479909,\n \ \ \"acc_stderr\": 0.012116912419925702\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.8105761641673244,\n \"acc_stderr\": 0.011012790432989247\n\ \ }\n}\n```" repo_url: https://huggingface.co/upstage/llama-65b-instruct leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|arc:challenge|25_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T16:32:35.958499.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_17T01_44_05.835561 path: - '**/details_harness|drop|3_2023-10-17T01-44-05.835561.parquet' - split: 2023_10_24T19_27_31.642045 path: - '**/details_harness|drop|3_2023-10-24T19-27-31.642045.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-24T19-27-31.642045.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_17T01_44_05.835561 path: - '**/details_harness|gsm8k|5_2023-10-17T01-44-05.835561.parquet' - split: 2023_10_24T19_27_31.642045 path: - '**/details_harness|gsm8k|5_2023-10-24T19-27-31.642045.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-24T19-27-31.642045.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hellaswag|10_2023-07-31T16:32:35.958499.parquet' - 
split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:32:35.958499.parquet' - 
'**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:32:35.958499.parquet' - 
'**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:32:35.958499.parquet' - 
'**/details_harness|hendrycksTest-virology|5_2023-07-31T16:32:35.958499.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - 
split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T16:32:35.958499.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T16:32:35.958499.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T16_32_35.958499 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T16:32:35.958499.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T16:32:35.958499.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_17T01_44_05.835561 path: - '**/details_harness|winogrande|5_2023-10-17T01-44-05.835561.parquet' - split: 2023_10_24T19_27_31.642045 path: - '**/details_harness|winogrande|5_2023-10-24T19-27-31.642045.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-24T19-27-31.642045.parquet' - config_name: results data_files: - split: 2023_07_31T16_32_35.958499 path: - results_2023-07-31T16:32:35.958499.parquet - split: 2023_10_17T01_44_05.835561 path: - results_2023-10-17T01-44-05.835561.parquet - split: 2023_10_24T19_27_31.642045 path: - results_2023-10-24T19-27-31.642045.parquet - split: latest path: - results_2023-10-24T19-27-31.642045.parquet --- # Dataset Card for Evaluation run of upstage/llama-65b-instruct ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/upstage/llama-65b-instruct - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [upstage/llama-65b-instruct](https://huggingface.co/upstage/llama-65b-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_upstage__llama-65b-instruct", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T19:27:31.642045](https://huggingface.co/datasets/open-llm-leaderboard/details_upstage__llama-65b-instruct/blob/main/results_2023-10-24T19-27-31.642045.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.454383389261745, "em_stderr": 0.005099113352549085, "f1": 0.5468970218120836, "f1_stderr": 0.004699295426287538, "acc": 0.5364480517576576, "acc_stderr": 0.011564851426457474 }, "harness|drop|3": { "em": 0.454383389261745, "em_stderr": 0.005099113352549085, "f1": 0.5468970218120836, "f1_stderr": 0.004699295426287538 }, "harness|gsm8k|5": { "acc": 0.2623199393479909, "acc_stderr": 0.012116912419925702 }, "harness|winogrande|5": { "acc": 0.8105761641673244, "acc_stderr": 0.011012790432989247 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
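As a complement to the loading snippet above, the aggregated scores can be read directly from the `results` configuration declared in the YAML header. A minimal sketch, assuming only that the `datasets` library is installed; the exact column layout of the aggregated table depends on the harness version used for the run:

```python
from datasets import load_dataset

# Load the aggregated scores for the most recent run of this model.
# The "results" config and its "latest" split are declared in the YAML header above.
results = load_dataset(
    "open-llm-leaderboard/details_upstage__llama-65b-instruct",
    "results",
    split="latest",
)

# Inspect the available columns; the schema depends on the harness version.
print(results.column_names)
```

Loading a timestamped split (for example `2023_10_17T01_44_05.835561`) instead of `latest` selects the results of an earlier run.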
[ -0.3704652786254883, -0.5293743014335632, 0.27051618695259094, 0.2868466079235077, -0.184626504778862, 0.24473562836647034, -0.35887789726257324, -0.1301431804895401, 0.453772634267807, 0.5652228593826294, -0.7632480263710022, -0.8963569402694702, -0.6558470726013184, 0.21128985285758972, -0.2945503294467926, 1.1441915035247803, -0.22263203561306, -0.22993707656860352, -0.01467572245746851, -0.18821237981319427, -0.38368499279022217, -0.4728914201259613, -0.4598826467990875, -0.49212199449539185, 0.2277335673570633, 0.6768937110900879, 0.3948229253292084, 0.7001380324363708, 0.5929256677627563, 0.39901402592658997, -0.2339855432510376, 0.20155540108680725, -0.39280346035957336, -0.1724637895822525, 0.2975580394268036, -0.6144466400146484, -0.7299512624740601, 0.028332119807600975, 0.756212055683136, 0.42426854372024536, -0.3087952733039856, 0.6447864174842834, 0.09750612825155258, 0.6265797019004822, -0.4078052341938019, 0.296874076128006, -0.42025724053382874, -0.04950954392552376, -0.46191051602363586, -0.10104220360517502, 0.04356244578957558, -0.23432610929012299, -0.30491960048675537, -0.5828641653060913, 0.14054903388023376, 0.05538300424814224, 1.113097906112671, 0.21552368998527527, -0.1447066217660904, -0.1952420473098755, -0.308810293674469, 0.8751702308654785, -0.8509618639945984, -0.10649885982275009, 0.6185004711151123, 0.09174031764268875, -0.2983211278915405, -0.6706885695457458, -0.49854984879493713, -0.11869598180055618, -0.21704305708408356, 0.16214768588542938, -0.09645260125398636, -0.08555811643600464, 0.41543665528297424, 0.7265552878379822, -0.5059933066368103, 0.05583249777555466, -0.5136268138885498, -0.1633487045764923, 1.0148091316223145, 0.3908199071884155, 0.09404714405536652, -0.5777477025985718, -0.4342817962169647, -0.3721751272678375, -0.5409670472145081, 0.2151041477918625, 0.4223152995109558, 0.3374681770801544, -0.5954347848892212, 0.8045734763145447, -0.46665486693382263, 0.5333380699157715, -0.054903700947761536, -0.14685435593128204, 0.7537188529968262, -0.6910120248794556, -0.33705955743789673, 0.0037167873233556747, 0.9580692052841187, 0.3988471031188965, 0.07417310774326324, 0.2888191342353821, -0.23634669184684753, 0.04065367579460144, 0.10440334677696228, -0.7118335366249084, -0.12155431509017944, 0.4058881402015686, -0.46242645382881165, -0.48262789845466614, 0.17132799327373505, -0.9661811590194702, -0.1974238008260727, -0.2986207902431488, 0.20147329568862915, -0.08142126351594925, -0.2370009869337082, -0.047689493745565414, -0.1101173684000969, 0.21621766686439514, 0.15168210864067078, -0.5551245808601379, 0.32141679525375366, 0.5587021112442017, 0.9515774250030518, -0.06216015666723251, -0.35869476199150085, -0.24225637316703796, -0.11389824748039246, -0.10823722183704376, 0.49593690037727356, -0.23627950251102448, -0.4861973524093628, -0.25985538959503174, 0.282177597284317, -0.28323623538017273, -0.5898873209953308, 0.7321643829345703, -0.26124534010887146, 0.23072770237922668, -0.27221161127090454, -0.4570060968399048, -0.22982735931873322, 0.280337393283844, -0.6013542413711548, 1.5162187814712524, 0.3108614683151245, -0.8385862112045288, 0.09687596559524536, -0.8946511149406433, -0.20380795001983643, 0.0588090680539608, -0.1371617615222931, -0.5963792204856873, -0.18159246444702148, 0.18429018557071686, 0.6006714701652527, -0.40649867057800293, 0.1255372166633606, -0.26318359375, -0.4484451115131378, 0.13967590034008026, -0.11707498878240585, 1.1394113302230835, 0.1718910038471222, -0.5511454939842224, 0.18931756913661957, 
-1.1497949361801147, 0.08954261988401413, 0.38708218932151794, -0.6355520486831665, -0.18026389181613922, -0.31223806738853455, 0.0969567522406578, 0.11158917099237442, 0.4825808107852936, -0.6450582146644592, 0.3829147517681122, -0.12555015087127686, 0.3593812584877014, 1.0356253385543823, 0.011786475777626038, 0.21550819277763367, -0.43056949973106384, 0.5521067380905151, -0.0278882198035717, 0.32026800513267517, 0.14984986186027527, -0.5790112614631653, -0.8548129796981812, -0.2604855000972748, 0.1194329485297203, 0.6973609328269958, -0.41948479413986206, 0.7219147682189941, -0.3951830565929413, -0.6856252551078796, -0.7249067425727844, 0.18957695364952087, 0.4986070394515991, 0.5287307500839233, 0.34029969573020935, -0.3343662917613983, -0.7853138446807861, -0.9633965492248535, 0.12473724037408829, -0.1829085350036621, -0.08616109937429428, 0.41150882840156555, 0.9983505010604858, -0.4135703444480896, 0.6213688850402832, -0.7262026071548462, -0.41758400201797485, -0.2853977084159851, -0.010316076688468456, 0.8063496351242065, 0.4464775621891022, 0.4595007598400116, -0.6675546169281006, -0.272234171628952, -0.058848556131124496, -0.7613853216171265, -0.21961162984371185, -0.11499310284852982, -0.2619923949241638, 0.2941826581954956, -0.06413854658603668, -0.5706765055656433, 0.5650157332420349, 0.563172459602356, -0.5529046058654785, 0.7084100842475891, -0.14226406812667847, 0.45024552941322327, -1.2745273113250732, 0.18697747588157654, -0.008115055039525032, 0.0807441845536232, -0.45975905656814575, -0.15410764515399933, 0.041778892278671265, 0.2554236948490143, -0.3690309226512909, 0.5128031969070435, -0.40531885623931885, -0.27466464042663574, 0.07523085176944733, 0.08194360136985779, -0.013780243694782257, 0.5590059757232666, -0.35385480523109436, 0.786954402923584, 0.45524802803993225, -0.4006410241127014, 0.4382477402687073, 0.5455020666122437, -0.6352448463439941, 0.3208286464214325, -0.5906180143356323, 0.046028394252061844, 0.20467711985111237, 0.22411145269870758, -0.9793581366539001, -0.521456241607666, 0.4054360091686249, -0.5299543738365173, 0.25232037901878357, -0.1672763228416443, -0.5556706190109253, -0.5443249344825745, -0.5232996344566345, 0.13760241866111755, 0.46444541215896606, -0.490741491317749, 0.2734964191913605, 0.48549437522888184, 0.056394875049591064, -0.7590252757072449, -0.7608150839805603, -0.08986133337020874, -0.4398612678050995, -0.7364795207977295, 0.4683390259742737, -0.2266160249710083, -0.3027786314487457, -0.06240257993340492, -0.015900710597634315, -0.03604118153452873, 0.19446492195129395, 0.34415391087532043, 0.54144686460495, 0.015844011679291725, -0.3355843126773834, -0.1753118634223938, -0.07994132488965988, 0.11681199073791504, 0.21929404139518738, 0.5498174428939819, -0.27570658922195435, -0.2972012162208557, -0.27636733651161194, 0.15555918216705322, 0.49699968099594116, -0.055625468492507935, 0.6948863863945007, 0.5681350827217102, -0.23749394714832306, -0.02933015488088131, -0.35358989238739014, 0.0838044211268425, -0.4931042790412903, 0.3475051522254944, -0.277765154838562, -0.7150735259056091, 0.8862305879592896, 0.24014295637607574, 0.1519048810005188, 0.6704316139221191, 0.6380088925361633, 0.015605473890900612, 0.7731096744537354, 0.253818541765213, -0.1471712440252304, 0.48058536648750305, -0.7835037708282471, -0.22983357310295105, -1.1119898557662964, -0.4010799527168274, -0.45994916558265686, -0.3739277124404907, -0.7829408645629883, -0.38725998997688293, 0.3243185877799988, 0.15481078624725342, -0.49591168761253357, 
0.525812029838562, -0.6533941626548767, 0.2231743186712265, 0.6782229542732239, 0.2480093389749527, 0.2058333307504654, -0.02185550332069397, -0.07721220701932907, 0.29842662811279297, -0.5160433650016785, -0.37442031502723694, 1.3865066766738892, 0.20708924531936646, 0.647233247756958, -0.07388429343700409, 0.9252612590789795, 0.40921708941459656, 0.2908111810684204, -0.4413761794567108, 0.6971020698547363, -0.08635784685611725, -0.6082193851470947, -0.18104824423789978, -0.527991533279419, -0.89945387840271, 0.2149190455675125, 0.027052907273173332, -0.9728291630744934, 0.1383388191461563, 0.0590791180729866, -0.14092521369457245, 0.38749098777770996, -0.5136783123016357, 0.7195610404014587, -0.2928263247013092, -0.30619269609451294, -0.0012012957595288754, -0.8800156116485596, 0.4555460512638092, -0.011483515612781048, 0.40467119216918945, -0.27310433983802795, 0.03084259293973446, 1.138782024383545, -0.6311687231063843, 0.8016332387924194, -0.13045762479305267, 0.07571949809789658, 0.34217822551727295, -0.3704908490180969, 0.6503471732139587, -0.08678737282752991, -0.22426573932170868, 0.5255284905433655, -0.24166877567768097, -0.27966228127479553, -0.3010419011116028, 0.8488091826438904, -0.9067474007606506, -0.3939147889614105, -0.37694692611694336, -0.5787014961242676, 0.25096502900123596, 0.17460565268993378, 0.45718714594841003, 0.34442782402038574, 0.14808638393878937, 0.2681495249271393, 0.26911288499832153, -0.15096251666545868, 0.5494216084480286, 0.36737731099128723, -0.12894733250141144, -0.7329821586608887, 0.5652525424957275, 0.26375967264175415, 0.14173653721809387, 0.19864071905612946, 0.026036860421299934, -0.5721001029014587, -0.41075441241264343, -0.5917156338691711, 0.28004151582717896, -0.679888904094696, -0.38696882128715515, -0.3984605073928833, -0.18199309706687927, -0.3090701997280121, -0.05405990779399872, -0.500163733959198, -0.4977458417415619, -0.4784940183162689, -0.2505795657634735, 0.6622971296310425, 0.6456022262573242, -0.4290107190608978, 0.3555675745010376, -0.7195504903793335, 0.16958296298980713, -0.18489328026771545, 0.3813747465610504, -0.10655035823583603, -0.6686186194419861, -0.372620552778244, 0.19048146903514862, -0.4163970351219177, -0.9366686940193176, 0.5436409711837769, 0.014991840347647667, 0.7004480361938477, 0.09391077607870102, 0.11550157517194748, 0.7826446294784546, -0.1011495590209961, 0.9938351511955261, 0.011800922453403473, -0.8008520603179932, 0.7934187054634094, -0.2747637629508972, 0.09308822453022003, 0.5129441618919373, 0.2521495819091797, -0.428963303565979, -0.20542722940444946, -0.942035973072052, -1.2032819986343384, 1.0065807104110718, 0.6134482026100159, -0.33527040481567383, 0.18458427488803864, 0.22680021822452545, -0.12708249688148499, 0.2369314581155777, -0.6738048195838928, -0.792724072933197, -0.02422173134982586, -0.2596058249473572, -0.14644937217235565, -0.07581523805856705, -0.4675969183444977, -0.463781476020813, 0.9036831259727478, 0.07096800208091736, 0.46165379881858826, 0.20346499979496002, -0.04992560297250748, -0.10536779463291168, 0.3308333456516266, 0.49125024676322937, 0.747382402420044, -0.4468213617801666, -0.10656024515628815, 0.3500940501689911, -0.5983917713165283, 0.18801875412464142, 0.36443865299224854, -0.10442190617322922, -0.1330551952123642, 0.4615418612957001, 0.9418957233428955, 0.22393225133419037, -0.35129499435424805, 0.46990513801574707, 0.06473356485366821, -0.22027422487735748, -0.5536887049674988, 0.11855652928352356, -0.040358591824769974, 0.38254329562187195, 
0.4601530134677887, -0.10381018370389938, -0.0191030316054821, -0.29338765144348145, 0.34047430753707886, 0.19996194541454315, -0.07327243685722351, -0.30593645572662354, 0.6596671342849731, 0.03544961288571358, -0.3316744267940521, 0.7625342607498169, -0.07443112134933472, -0.5929163694381714, 1.1426645517349243, 0.321105033159256, 0.7946350574493408, -0.09828896820545197, 0.16350597143173218, 0.46817976236343384, 0.3088282644748688, -0.13384920358657837, 0.5933115482330322, 0.01828601583838463, -0.637576162815094, -0.372146874666214, -0.8723375201225281, -0.260120153427124, 0.4089515507221222, -1.0440045595169067, 0.35772955417633057, -0.023695001378655434, -0.2004925161600113, -0.12641620635986328, 0.43109768629074097, -0.8660122752189636, 0.1833261102437973, 0.05580044537782669, 0.8715760707855225, -1.0099486112594604, 0.6634603142738342, 0.9202100038528442, -0.5144546031951904, -0.9451783299446106, -0.34710779786109924, 0.15229532122612, -0.9886242747306824, 0.47240906953811646, 0.2704876959323883, 0.3107195198535919, -0.027134131640195847, -0.6101973056793213, -1.0768080949783325, 1.595532774925232, 0.1707703024148941, -0.5562608242034912, 0.1732422262430191, 0.19132070243358612, 0.3701293468475342, -0.3517317771911621, 0.5656872987747192, 0.7648531794548035, 0.7206575274467468, -0.019701937213540077, -0.9061396718025208, 0.35811248421669006, -0.4893803596496582, -0.014498960226774216, 0.2626284658908844, -0.921735405921936, 0.9778704047203064, -0.288688063621521, -0.13855837285518646, 0.030003590509295464, 0.43344390392303467, 0.6941343545913696, 0.38372382521629333, 0.49802106618881226, 0.7480162978172302, 0.7306999564170837, -0.34222325682640076, 1.1759281158447266, -0.23693877458572388, 0.8233555555343628, 1.0392775535583496, 0.0764193907380104, 0.6800283193588257, 0.3106922209262848, -0.5787240266799927, 0.5847440361976624, 0.9221499562263489, -0.3641321361064911, 0.3920869529247284, 0.2525586783885956, -0.015763744711875916, -0.040273912250995636, -0.02150987647473812, -0.5164692401885986, 0.5168240666389465, 0.19238241016864777, -0.5339856743812561, -0.12646237015724182, -0.297456830739975, 0.21475176513195038, -0.3298969268798828, -0.32455334067344666, 0.5081124901771545, -0.038260214030742645, -0.48433786630630493, 0.7331744432449341, -0.19421960413455963, 0.7756227254867554, -0.692275881767273, -0.09984651952981949, -0.3693409264087677, 0.3126233220100403, -0.477397620677948, -1.0824391841888428, 0.16047102212905884, 0.0974317342042923, -0.2335447371006012, -0.17050357162952423, 0.6711537837982178, -0.2562744915485382, -0.6099517345428467, 0.48906779289245605, 0.2807084918022156, 0.36507225036621094, 0.06557764858007431, -0.9197868704795837, 0.2283478081226349, 0.36707305908203125, -0.8418319225311279, 0.4358598291873932, 0.19839487969875336, 0.08923369646072388, 0.6124930381774902, 0.734687089920044, 0.10949963331222534, 0.0900530144572258, -0.027023356407880783, 1.1529642343521118, -0.8023357391357422, -0.3783985376358032, -0.7975609302520752, 0.9682693481445312, -0.34805169701576233, -0.647607684135437, 0.8457531929016113, 0.7889536023139954, 0.9566483497619629, 0.08470439910888672, 0.7725928425788879, -0.4758545756340027, 0.440531462430954, -0.33304572105407715, 0.8015655875205994, -0.8172973990440369, 0.40381819009780884, -0.20538128912448883, -0.8190779685974121, 0.011002862825989723, 0.5443929433822632, -0.08999233692884445, -0.000018674491002457216, 0.4180915951728821, 0.9858577847480774, 0.08376028388738632, 0.11595650762319565, -0.020754428580403328, 
0.37604692578315735, 0.2435920387506485, 0.5950990915298462, 0.7736422419548035, -0.641531765460968, 0.5293456315994263, -0.7506210207939148, -0.4832231104373932, -0.14093567430973053, -0.7446065545082092, -0.6496480703353882, -0.5595359206199646, -0.21508575975894928, -0.47164222598075867, -0.04364771395921707, 0.9576164484024048, 0.4558465778827667, -0.8253409266471863, -0.48414376378059387, -0.043222904205322266, 0.15070894360542297, -0.18209713697433472, -0.3512279689311981, 0.599080502986908, -0.0004965838161297143, -0.7693207859992981, 0.36941084265708923, -0.10826767235994339, -0.13487856090068817, -0.006203773431479931, -0.17071610689163208, -0.399107962846756, -0.26607635617256165, 0.44538426399230957, 0.17989525198936462, -0.6459867358207703, -0.35293281078338623, -0.10425136983394623, 0.1017078161239624, 0.2824501395225525, 0.3179519474506378, -0.4835110902786255, 0.08334001898765564, 0.5617429614067078, 0.24580806493759155, 0.6662084460258484, 0.08679990470409393, 0.2928522229194641, -0.8364121913909912, -0.083636574447155, -0.08820967376232147, 0.5412629246711731, 0.2785447835922241, -0.5575807094573975, 1.0518302917480469, 0.3748715817928314, -0.8416143655776978, -0.953665554523468, -0.1672949492931366, -1.176737666130066, 0.01211019977927208, 1.4503419399261475, -0.2804620563983917, -0.3155272305011749, 0.09963483363389969, -0.14441870152950287, 0.42407867312431335, -0.6614859700202942, 0.6562547087669373, 0.6670849919319153, -0.4108249843120575, 0.14245986938476562, -0.5620235800743103, 0.35245105624198914, 0.13110564649105072, -1.0487520694732666, 0.030327560380101204, 0.3404538333415985, 0.3666757047176361, 0.25111818313598633, 0.6945027709007263, -0.00006378622492775321, -0.17705772817134857, -0.01073607336729765, 0.23161844909191132, -0.24391086399555206, -0.11443675309419632, -0.25485074520111084, 0.04820021241903305, -0.478061318397522, -0.5570698976516724 ]
open-llm-leaderboard/details_NousResearch__Nous-Hermes-llama-2-7b
open-llm-leaderboard
2023-10-22T01:50:15Z
200
0
[ "region:us" ]
null
2023-08-17T23:50:28Z
--- pretty_name: Evaluation run of NousResearch/Nous-Hermes-llama-2-7b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [NousResearch/Nous-Hermes-llama-2-7b](https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NousResearch__Nous-Hermes-llama-2-7b\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T01:50:03.524306](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-llama-2-7b/blob/main/results_2023-10-22T01-50-03.524306.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.14649748322147652,\n\ \ \"em_stderr\": 0.0036212385599472124,\n \"f1\": 0.21412122483221444,\n\ \ \"f1_stderr\": 0.0037396442766702157,\n \"acc\": 0.3989754501778092,\n\ \ \"acc_stderr\": 0.009370647012687763\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.14649748322147652,\n \"em_stderr\": 0.0036212385599472124,\n\ \ \"f1\": 0.21412122483221444,\n \"f1_stderr\": 0.0037396442766702157\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0576194086429113,\n \ \ \"acc_stderr\": 0.006418593319822861\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7403314917127072,\n \"acc_stderr\": 0.012322700705552667\n\ \ }\n}\n```" repo_url: https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|arc:challenge|25_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T15:03:15.265717.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T01_50_03.524306 path: - '**/details_harness|drop|3_2023-10-22T01-50-03.524306.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T01-50-03.524306.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T01_50_03.524306 path: - '**/details_harness|gsm8k|5_2023-10-22T01-50-03.524306.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T01-50-03.524306.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hellaswag|10_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:03:15.265717.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:03:15.265717.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:03:15.265717.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:03:15.265717.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T15_03_15.265717 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:03:15.265717.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T15:03:15.265717.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T15_03_15.265717 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T15:03:15.265717.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T15:03:15.265717.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T01_50_03.524306 path: - '**/details_harness|winogrande|5_2023-10-22T01-50-03.524306.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T01-50-03.524306.parquet' - config_name: results data_files: - split: 2023_07_31T15_03_15.265717 path: - results_2023-07-31T15:03:15.265717.parquet - split: 2023_10_22T01_50_03.524306 path: - results_2023-10-22T01-50-03.524306.parquet - split: latest path: - results_2023-10-22T01-50-03.524306.parquet --- # Dataset Card for Evaluation run of NousResearch/Nous-Hermes-llama-2-7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [NousResearch/Nous-Hermes-llama-2-7b](https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NousResearch__Nous-Hermes-llama-2-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T01:50:03.524306](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-llama-2-7b/blob/main/results_2023-10-22T01-50-03.524306.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.14649748322147652, "em_stderr": 0.0036212385599472124, "f1": 0.21412122483221444, "f1_stderr": 0.0037396442766702157, "acc": 0.3989754501778092, "acc_stderr": 0.009370647012687763 }, "harness|drop|3": { "em": 0.14649748322147652, "em_stderr": 0.0036212385599472124, "f1": 0.21412122483221444, "f1_stderr": 0.0037396442766702157 }, "harness|gsm8k|5": { "acc": 0.0576194086429113, "acc_stderr": 0.006418593319822861 }, "harness|winogrande|5": { "acc": 0.7403314917127072, "acc_stderr": 0.012322700705552667 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
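As a complement to the loading snippet in the summary above, here is a minimal sketch of how one might enumerate this dataset's configurations and pull the latest aggregated results. It assumes a recent version of the `datasets` library; the config name `results` and the split name `latest` are taken from the YAML header of this card.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_NousResearch__Nous-Hermes-llama-2-7b"

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations available")

# The "latest" split of the "results" configuration points to the most recent run.
latest_results = load_dataset(repo, "results", split="latest")
print(latest_results)
```

Individual task configurations (for example `harness_winogrande_5`) can be loaded the same way, as shown in the summary above.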
[ -0.392622172832489, -0.6050981283187866, 0.26677054166793823, 0.18932972848415375, -0.18130768835544586, 0.154188334941864, -0.32316508889198303, -0.2189580798149109, 0.4989151954650879, 0.6146122217178345, -0.7528239488601685, -0.9705303907394409, -0.6695604920387268, 0.25931957364082336, -0.24284490942955017, 1.238102674484253, -0.24354150891304016, -0.26770585775375366, 0.025090625509619713, -0.3669239580631256, -0.2359488606452942, -0.39297154545783997, -0.4861012101173401, -0.36101439595222473, 0.40796270966529846, 0.7016183733940125, 0.302886426448822, 0.7630798816680908, 0.689601480960846, 0.3328581750392914, -0.16392634809017181, 0.2281857579946518, -0.468103289604187, -0.009276140481233597, 0.2224317491054535, -0.6512152552604675, -0.8016564846038818, 0.16861049830913544, 0.6476921439170837, 0.4171193838119507, -0.19737249612808228, 0.7032176852226257, 0.09541667997837067, 0.5298885107040405, -0.4433324337005615, 0.34241148829460144, -0.37856346368789673, -0.10384058952331543, -0.4084641933441162, -0.24918852746486664, 0.014002079144120216, -0.3194959759712219, -0.12620651721954346, -0.6711348295211792, 0.2437402755022049, 0.0800754576921463, 1.101265549659729, 0.21157529950141907, -0.22078344225883484, -0.2713642418384552, -0.24818822741508484, 1.0287506580352783, -0.8817511796951294, -0.05079786852002144, 0.7041155099868774, 0.07139222323894501, -0.34340158104896545, -0.5529909133911133, -0.4147396683692932, -0.1661824733018875, -0.21802650392055511, 0.13247884809970856, 0.05635593459010124, -0.21639132499694824, 0.4119206368923187, 0.6668742895126343, -0.6911622285842896, 0.01638210564851761, -0.539665937423706, -0.04691575840115547, 1.0334174633026123, 0.40050968527793884, 0.08680710941553116, -0.5059752464294434, -0.37835246324539185, -0.37305349111557007, -0.4194336533546448, 0.20532986521720886, 0.4694468080997467, 0.4890592694282532, -0.7530891299247742, 0.7801101803779602, -0.49315282702445984, 0.4801967740058899, -0.05873936414718628, -0.3801487982273102, 0.899768590927124, -0.541958212852478, -0.1543571501970291, -0.02955823391675949, 1.0035511255264282, 0.4562964141368866, -0.038102567195892334, 0.16628959774971008, -0.30491918325424194, 0.03470974788069725, 0.0815177708864212, -0.7340855002403259, -0.08809634298086166, 0.416718989610672, -0.5838543176651001, -0.47255951166152954, 0.14840908348560333, -0.925005316734314, -0.2261742502450943, -0.2566866874694824, 0.14597287774085999, -0.16800498962402344, -0.3765278160572052, -0.04893600940704346, -0.10929138213396072, 0.31973564624786377, 0.1552172303199768, -0.5994412899017334, 0.430838406085968, 0.5852786898612976, 0.9875782132148743, -0.05161610618233681, -0.33892372250556946, -0.3710970878601074, -0.24163849651813507, -0.21589374542236328, 0.4855886697769165, -0.2938092052936554, -0.46933209896087646, -0.24522195756435394, 0.33244070410728455, -0.29096856713294983, -0.6002814173698425, 0.8293688297271729, -0.2251153588294983, 0.18023177981376648, -0.2836191952228546, -0.43538692593574524, -0.21598899364471436, 0.39218461513519287, -0.6922751069068909, 1.5233272314071655, 0.26577502489089966, -0.8558775782585144, 0.04181504249572754, -0.8941829800605774, -0.2659640908241272, 0.03602715954184532, -0.05520501360297203, -0.6104309558868408, -0.13245651125907898, 0.17632217705249786, 0.5471439957618713, -0.3743155896663666, -0.0053370799869298935, -0.31690463423728943, -0.43318140506744385, 0.12804599106311798, 0.04514786973595619, 1.0098204612731934, 0.12728047370910645, -0.523440957069397, 0.059517111629247665, 
-1.011890172958374, -0.009431940503418446, 0.4883505702018738, -0.5641074776649475, -0.1939193606376648, -0.17727380990982056, 0.16000913083553314, 0.12344665080308914, 0.6223474740982056, -0.6573054790496826, 0.40723705291748047, -0.17813514173030853, 0.2906390428543091, 0.90602707862854, -0.06544017791748047, 0.3468598425388336, -0.48176875710487366, 0.5275769829750061, -0.0966709777712822, 0.3109222948551178, 0.09349581599235535, -0.6089762449264526, -0.803579568862915, -0.16102920472621918, 0.06626992672681808, 0.7006252408027649, -0.4899924397468567, 0.6763450503349304, -0.36094972491264343, -0.7836792469024658, -0.7113838791847229, 0.14401352405548096, 0.4139955937862396, 0.5852724313735962, 0.4205252528190613, -0.3187430500984192, -0.7692243456840515, -0.9715540409088135, 0.07581880688667297, -0.22210076451301575, -0.001914864405989647, 0.6299352049827576, 1.0203399658203125, -0.3833887577056885, 0.5985963344573975, -0.6780745387077332, -0.36933812499046326, -0.29288506507873535, 0.0493256114423275, 0.8024628758430481, 0.4750143587589264, 0.5207406878471375, -0.592251181602478, -0.25550803542137146, -0.025558527559041977, -0.8314290046691895, -0.2279188483953476, -0.14105407893657684, -0.2294803112745285, 0.3311811089515686, -0.12248868495225906, -0.4751400053501129, 0.4962634742259979, 0.5764604806900024, -0.4866563081741333, 0.6637725830078125, -0.020303070545196533, 0.36061540246009827, -1.1267837285995483, 0.2368783801794052, 0.10512135922908783, 0.10116754472255707, -0.455036461353302, -0.11936730146408081, 0.015177951194345951, 0.3701823055744171, -0.2997320890426636, 0.6704873442649841, -0.3604906499385834, -0.20255118608474731, -0.042325254529714584, 0.2391507625579834, -0.05956802889704704, 0.5155048966407776, -0.22689537703990936, 0.7402902841567993, 0.5626762509346008, -0.4209133982658386, 0.3419041931629181, 0.5551916360855103, -0.5076474547386169, 0.3037031590938568, -0.4848749339580536, 0.08349515497684479, 0.1390959471464157, 0.24196185171604156, -0.9524211287498474, -0.30591997504234314, 0.41976943612098694, -0.5846794247627258, 0.15122660994529724, -0.22590644657611847, -0.5617319941520691, -0.4583534300327301, -0.5661742687225342, 0.23471707105636597, 0.3869372010231018, -0.4970531761646271, 0.23661600053310394, 0.37404534220695496, 0.04116753488779068, -0.717556893825531, -0.6783869862556458, -0.07612890750169754, -0.34087666869163513, -0.6268071532249451, 0.3636789619922638, -0.17145882546901703, -0.19482402503490448, -0.050492238253355026, -0.16904287040233612, -0.008572624064981937, 0.22970594465732574, 0.3503153622150421, 0.6362165808677673, -0.1758049726486206, -0.3576154410839081, -0.2388109266757965, -0.19828593730926514, 0.054823167622089386, 0.1543911099433899, 0.5555426478385925, -0.23650960624217987, -0.2278066873550415, -0.3133356273174286, 0.0875188484787941, 0.48994180560112, -0.20848077535629272, 0.7995054721832275, 0.6560719013214111, -0.21299591660499573, 0.017005126923322678, -0.4732852876186371, -0.07782991230487823, -0.46359342336654663, 0.2632846534252167, -0.25918182730674744, -0.8739665746688843, 0.8059728741645813, 0.2422800213098526, 0.2535150945186615, 0.7061529159545898, 0.5258521437644958, 0.07790083438158035, 0.726071834564209, 0.3049972951412201, -0.17073224484920502, 0.5278644561767578, -0.8045632839202881, 0.026876350864768028, -1.187931776046753, -0.44717738032341003, -0.4986642599105835, -0.4911787807941437, -0.8939054608345032, -0.38588446378707886, 0.17986543476581573, 0.14662128686904907, -0.4052192270755768, 
0.5754410028457642, -0.6651641726493835, 0.16503405570983887, 0.6909469366073608, 0.1401268094778061, 0.046572279185056686, -0.038463037461042404, -0.06535536050796509, 0.20344826579093933, -0.387158066034317, -0.5341444611549377, 1.487513542175293, 0.19660402834415436, 0.6130907535552979, -0.021849438548088074, 0.9752041697502136, 0.21930724382400513, 0.26974719762802124, -0.4867153465747833, 0.6556711792945862, -0.04101674258708954, -0.543999433517456, -0.17626599967479706, -0.5559085011482239, -1.0176007747650146, 0.19453981518745422, -0.08820604532957077, -0.9952848553657532, 0.08768860995769501, -0.1589849591255188, -0.127829909324646, 0.3828813135623932, -0.4276037812232971, 0.7687481641769409, -0.29011186957359314, -0.34616991877555847, 0.041842661798000336, -0.8826627135276794, 0.4016798138618469, 0.07297800481319427, 0.344901442527771, -0.36675122380256653, -0.08315809071063995, 1.1945935487747192, -0.45582103729248047, 0.7120782136917114, -0.10854021459817886, 0.11394307762384415, 0.31762146949768066, -0.2911601662635803, 0.6003734469413757, -0.051916033029556274, -0.3010874092578888, 0.5250197052955627, -0.15688811242580414, -0.33134832978248596, -0.24485981464385986, 0.9048026204109192, -0.9158090949058533, -0.343790203332901, -0.4332360625267029, -0.5758054256439209, 0.3277367353439331, 0.20253466069698334, 0.2837570607662201, 0.2178267538547516, 0.07968305051326752, 0.22319002449512482, 0.278264582157135, -0.2693686783313751, 0.488675981760025, 0.5175579786300659, -0.09219203144311905, -0.6781817078590393, 0.7378015518188477, 0.24477702379226685, -0.033921148627996445, 0.21405231952667236, 0.01802971214056015, -0.5533944368362427, -0.3760337233543396, -0.43170878291130066, 0.41820159554481506, -0.43462204933166504, -0.3360495865345001, -0.43804407119750977, -0.2057480663061142, -0.39735671877861023, -0.017265092581510544, -0.42907169461250305, -0.4815707504749298, -0.5103346109390259, -0.3156002461910248, 0.6873962879180908, 0.6813225150108337, -0.3779037296772003, 0.31780895590782166, -0.7029469013214111, 0.29530495405197144, -0.24275556206703186, 0.4487733244895935, -0.09236623346805573, -0.5190600156784058, -0.3932625949382782, 0.06552040576934814, -0.47006261348724365, -0.9535765051841736, 0.5609557628631592, -0.055179011076688766, 0.7119536399841309, 0.21739374101161957, 0.16776889562606812, 0.7304595112800598, -0.16480660438537598, 1.035091519355774, -0.04726697504520416, -0.6925939321517944, 0.7694657444953918, -0.32268816232681274, 0.015240821987390518, 0.4711982309818268, 0.13545472919940948, -0.564559817314148, -0.2658509314060211, -0.9355087280273438, -1.207315444946289, 1.1759371757507324, 0.6472917795181274, -0.31548431515693665, 0.07002703100442886, 0.3737327456474304, -0.10968630760908127, 0.21429184079170227, -0.6253057718276978, -0.8679975867271423, -0.0597420334815979, -0.20944128930568695, -0.11118454486131668, -0.10199672728776932, -0.3918382525444031, -0.38020026683807373, 0.9159027934074402, 0.03388088569045067, 0.41275689005851746, 0.2080790400505066, 0.01471058838069439, -0.08308058977127075, 0.2488894909620285, 0.5240949988365173, 0.5991190075874329, -0.4016370177268982, -0.08807357400655746, 0.36111438274383545, -0.589779794216156, 0.08973082900047302, 0.31395456194877625, 0.053916085511446, -0.0965767502784729, 0.5668607950210571, 0.8821130394935608, 0.0460616759955883, -0.4718264043331146, 0.498267263174057, 0.010363537818193436, -0.3171084225177765, -0.461303174495697, 0.09633059054613113, -0.054301340132951736, 0.4095359742641449, 
0.4784674048423767, -0.1543203592300415, 0.05685730651021004, -0.1679341197013855, 0.2592514455318451, 0.22678200900554657, -0.013919400051236153, -0.29976382851600647, 0.6710510849952698, -0.0851232260465622, -0.3322332203388214, 0.7549453973770142, -0.19216947257518768, -0.524274468421936, 1.135266900062561, 0.3150425851345062, 0.8239819407463074, -0.23595963418483734, 0.13018211722373962, 0.6176206469535828, 0.40374115109443665, -0.12806786596775055, 0.6237415075302124, 0.013153309933841228, -0.5671236515045166, -0.29337233304977417, -0.8182154297828674, -0.23386797308921814, 0.4222058951854706, -1.0433143377304077, 0.35125941038131714, -0.15573053061962128, -0.22179004549980164, -0.18722978234291077, 0.5131409168243408, -0.8142290711402893, 0.1547134965658188, 0.05923096835613251, 0.8418205380439758, -1.008576512336731, 0.6114233732223511, 0.9099158048629761, -0.4463578760623932, -0.8970075249671936, -0.3315904140472412, 0.11011091619729996, -0.8859056234359741, 0.45194417238235474, 0.33470189571380615, 0.32881173491477966, -0.2752177119255066, -0.6622834205627441, -1.1624205112457275, 1.5240856409072876, 0.21697184443473816, -0.5892536640167236, 0.24955108761787415, 0.14540615677833557, 0.3788306415081024, -0.3248554766178131, 0.5660558938980103, 0.7953116297721863, 0.688595712184906, -0.013152886182069778, -0.9670872092247009, 0.3366418480873108, -0.5460119843482971, -0.11937081813812256, 0.2868233025074005, -0.8627341389656067, 1.0481853485107422, -0.19457726180553436, -0.05741282179951668, 0.10538925230503082, 0.3486229479312897, 0.6742805242538452, 0.4375370442867279, 0.40990567207336426, 0.8113792538642883, 0.7597015500068665, -0.3078839182853699, 1.0760679244995117, -0.3600611686706543, 0.8914727568626404, 1.037408471107483, -0.026517773047089577, 0.7863196134567261, 0.35759031772613525, -0.39633601903915405, 0.61507248878479, 0.9003327488899231, -0.3701254725456238, 0.4131154716014862, 0.16658177971839905, -0.06903739273548126, -0.03563062474131584, -0.042221054434776306, -0.5089398622512817, 0.40861064195632935, 0.24399657547473907, -0.5471739768981934, -0.207840695977211, -0.4266211986541748, 0.1507599800825119, -0.33841344714164734, -0.16491125524044037, 0.5575765371322632, -0.01373663917183876, -0.37966200709342957, 0.7345128059387207, -0.02430516667664051, 0.7012680768966675, -0.5597792267799377, -0.15282943844795227, -0.2822835147380829, 0.21800996363162994, -0.582394003868103, -0.9569457173347473, 0.18741115927696228, 0.12427129596471786, -0.2125176638364792, -0.1680939793586731, 0.5334137678146362, -0.18451941013336182, -0.5787338018417358, 0.5328077673912048, 0.40973031520843506, 0.4162026047706604, 0.12187525629997253, -0.9274213314056396, 0.15974915027618408, 0.2642532289028168, -0.7901375889778137, 0.35511356592178345, 0.22464485466480255, 0.05740328133106232, 0.5599982738494873, 0.8049713373184204, 0.028861474245786667, 0.07878594845533371, -0.09443230926990509, 1.0708503723144531, -0.7877171039581299, -0.3545587360858917, -0.8213107585906982, 0.8283126950263977, -0.24051527678966522, -0.6284269094467163, 0.8754565715789795, 0.994094729423523, 0.8482022881507874, 0.19378730654716492, 0.7847102880477905, -0.4066181182861328, 0.5351678133010864, -0.19986554980278015, 0.7976366281509399, -0.8916460871696472, 0.3170202374458313, -0.2104467749595642, -0.822978675365448, -0.09609033912420273, 0.811656653881073, -0.2837807238101959, -0.0787610337138176, 0.4896530210971832, 1.0013386011123657, 0.06498192995786667, 0.15084697306156158, -0.09776662290096283, 
0.41169318556785583, 0.24851056933403015, 0.6169837713241577, 0.6536869406700134, -0.7021510601043701, 0.48448804020881653, -0.5964771509170532, -0.42899930477142334, -0.2170475870370865, -0.6196801662445068, -0.7587330937385559, -0.5161043405532837, -0.29326748847961426, -0.5058268904685974, -0.06333799660205841, 0.9351800084114075, 0.47196269035339355, -0.8360245227813721, -0.53746098279953, 0.045748595148324966, 0.2373616248369217, -0.19753359258174896, -0.33566951751708984, 0.5587331652641296, -0.08601308614015579, -0.7995421290397644, 0.4123374819755554, -0.041677650064229965, -0.16666993498802185, 0.03751940280199051, -0.24429269134998322, -0.36909976601600647, -0.2567427158355713, 0.4136553406715393, 0.17067427933216095, -0.7051520943641663, -0.21636484563350677, -0.004089729860424995, -0.05084391310811043, 0.3458513617515564, 0.2567283511161804, -0.5806124210357666, 0.014865543693304062, 0.49915391206741333, 0.26735952496528625, 0.7448937296867371, -0.050546932965517044, 0.12588666379451752, -0.7116551995277405, -0.04790043830871582, -0.017227094620466232, 0.5153878927230835, 0.2675083875656128, -0.4499979317188263, 1.0428781509399414, 0.297826886177063, -0.7478322982788086, -0.9420696496963501, -0.1863323152065277, -1.232104778289795, 0.04349591210484505, 1.3951566219329834, -0.2328214943408966, -0.36343929171562195, 0.0770057663321495, -0.22133390605449677, 0.3846645951271057, -0.7758117914199829, 0.6321117281913757, 0.5855701565742493, -0.4058968722820282, 0.04899912327528, -0.7019045948982239, 0.25015705823898315, 0.11738467961549759, -0.8896263837814331, -0.13011644780635834, 0.23168373107910156, 0.4651125371456146, 0.3253720700740814, 0.6292028427124023, 0.01364391390234232, -0.14781548082828522, -0.04354725033044815, 0.24206559360027313, -0.08108758181333542, -0.05865638703107834, -0.2094605416059494, 0.062204569578170776, -0.3772154450416565, -0.5935329794883728 ]
open-llm-leaderboard/details_NousResearch__Nous-Hermes-Llama2-13b
open-llm-leaderboard
2023-10-21T23:27:28Z
200
0
[ "region:us" ]
null
2023-08-17T23:50:36Z
--- pretty_name: Evaluation run of NousResearch/Nous-Hermes-Llama2-13b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [NousResearch/Nous-Hermes-Llama2-13b](https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 6 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NousResearch__Nous-Hermes-Llama2-13b\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T23:27:15.868927](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-Llama2-13b/blob/main/results_2023-10-21T23-27-15.868927.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.22934144295302014,\n\ \ \"em_stderr\": 0.004305384313236111,\n \"f1\": 0.30605285234899415,\n\ \ \"f1_stderr\": 0.004296224150122663,\n \"acc\": 0.4276861222626263,\n\ \ \"acc_stderr\": 0.010194652064655127\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.22934144295302014,\n \"em_stderr\": 0.004305384313236111,\n\ \ \"f1\": 0.30605285234899415,\n \"f1_stderr\": 0.004296224150122663\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10083396512509477,\n \ \ \"acc_stderr\": 0.008294031192126607\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7545382794001578,\n \"acc_stderr\": 0.012095272937183647\n\ \ }\n}\n```" repo_url: https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|arc:challenge|25_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|arc:challenge|25_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|arc:challenge|25_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-26T14:55:06.636628.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T20_14_41.722716 path: - '**/details_harness|drop|3_2023-10-21T20-14-41.722716.parquet' - split: 2023_10_21T21_17_49.044019 path: - '**/details_harness|drop|3_2023-10-21T21-17-49.044019.parquet' - split: 2023_10_21T23_27_15.868927 path: - '**/details_harness|drop|3_2023-10-21T23-27-15.868927.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T23-27-15.868927.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T20_14_41.722716 path: - 
'**/details_harness|gsm8k|5_2023-10-21T20-14-41.722716.parquet' - split: 2023_10_21T21_17_49.044019 path: - '**/details_harness|gsm8k|5_2023-10-21T21-17-49.044019.parquet' - split: 2023_10_21T23_27_15.868927 path: - '**/details_harness|gsm8k|5_2023-10-21T23-27-15.868927.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T23-27-15.868927.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hellaswag|10_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hellaswag|10_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hellaswag|10_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:44:05.322938.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T14:44:05.322938.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:02:46.466402.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:02:46.466402.parquet' - 
'**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-25T11:02:46.466402.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:55:06.636628.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:55:06.636628.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:55:06.636628.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:55:06.636628.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:44:05.322938.parquet' - split: 
2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:44:05.322938.parquet' - split: 
2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 
path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-management|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 
2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 
2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:55:06.636628.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T14_44_05.322938 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T14:44:05.322938.parquet' - split: 2023_07_25T11_02_46.466402 path: - '**/details_harness|truthfulqa:mc|0_2023-07-25T11:02:46.466402.parquet' - split: 2023_07_26T14_55_06.636628 path: - '**/details_harness|truthfulqa:mc|0_2023-07-26T14:55:06.636628.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-26T14:55:06.636628.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T20_14_41.722716 path: - '**/details_harness|winogrande|5_2023-10-21T20-14-41.722716.parquet' - split: 2023_10_21T21_17_49.044019 path: - '**/details_harness|winogrande|5_2023-10-21T21-17-49.044019.parquet' - split: 2023_10_21T23_27_15.868927 
path: - '**/details_harness|winogrande|5_2023-10-21T23-27-15.868927.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T23-27-15.868927.parquet' - config_name: results data_files: - split: 2023_07_24T14_44_05.322938 path: - results_2023-07-24T14:44:05.322938.parquet - split: 2023_07_25T11_02_46.466402 path: - results_2023-07-25T11:02:46.466402.parquet - split: 2023_07_26T14_55_06.636628 path: - results_2023-07-26T14:55:06.636628.parquet - split: 2023_10_21T20_14_41.722716 path: - results_2023-10-21T20-14-41.722716.parquet - split: 2023_10_21T21_17_49.044019 path: - results_2023-10-21T21-17-49.044019.parquet - split: 2023_10_21T23_27_15.868927 path: - results_2023-10-21T23-27-15.868927.parquet - split: latest path: - results_2023-10-21T23-27-15.868927.parquet --- # Dataset Card for Evaluation run of NousResearch/Nous-Hermes-Llama2-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [NousResearch/Nous-Hermes-Llama2-13b](https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 6 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NousResearch__Nous-Hermes-Llama2-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T23:27:15.868927](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-Llama2-13b/blob/main/results_2023-10-21T23-27-15.868927.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.22934144295302014, "em_stderr": 0.004305384313236111, "f1": 0.30605285234899415, "f1_stderr": 0.004296224150122663, "acc": 0.4276861222626263, "acc_stderr": 0.010194652064655127 }, "harness|drop|3": { "em": 0.22934144295302014, "em_stderr": 0.004305384313236111, "f1": 0.30605285234899415, "f1_stderr": 0.004296224150122663 }, "harness|gsm8k|5": { "acc": 0.10083396512509477, "acc_stderr": 0.008294031192126607 }, "harness|winogrande|5": { "acc": 0.7545382794001578, "acc_stderr": 0.012095272937183647 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
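Every configuration declared above exposes one split per timestamped run plus a `latest` alias that resolves to the most recent run's parquet files. As a minimal, hedged sketch of how those splits can be read with the `datasets` library (the repository, config, and split names come from this card; the variable names and `print` calls are illustrative only):

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_NousResearch__Nous-Hermes-Llama2-13b"

# Per-task details: any config listed above works; "latest" is the alias
# declared in its data_files (a specific timestamped split name can be used instead).
winogrande_latest = load_dataset(REPO, "harness_winogrande_5", split="latest")

# Aggregated metrics: the "results" config collects one results file per run.
results_latest = load_dataset(REPO, "results", split="latest")

print(winogrande_latest)
print(results_latest)
```

Because `latest` is just another named split in the `data_files` mapping, the same call should keep pointing at the newest run whenever the card is regenerated.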
open-llm-leaderboard/details_NousResearch__Nous-Hermes-13b
open-llm-leaderboard
2023-10-19T04:00:55Z
200
0
[ "region:us" ]
null
2023-08-17T23:51:13Z
--- pretty_name: Evaluation run of NousResearch/Nous-Hermes-13b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [NousResearch/Nous-Hermes-13b](https://huggingface.co/NousResearch/Nous-Hermes-13b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NousResearch__Nous-Hermes-13b\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T04:00:41.897332](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-13b/blob/main/results_2023-10-19T04-00-41.897332.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2930998322147651,\n\ \ \"em_stderr\": 0.00466150847986569,\n \"f1\": 0.37501048657718355,\n\ \ \"f1_stderr\": 0.004576570475121802,\n \"acc\": 0.41817812997218123,\n\ \ \"acc_stderr\": 0.009868526609981134\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.2930998322147651,\n \"em_stderr\": 0.00466150847986569,\n\ \ \"f1\": 0.37501048657718355,\n \"f1_stderr\": 0.004576570475121802\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08339651250947688,\n \ \ \"acc_stderr\": 0.00761565027710669\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7529597474348856,\n \"acc_stderr\": 0.012121402942855576\n\ \ }\n}\n```" repo_url: https://huggingface.co/NousResearch/Nous-Hermes-13b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|arc:challenge|25_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-18T15:33:41.626742.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T04_00_41.897332 path: - '**/details_harness|drop|3_2023-10-19T04-00-41.897332.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T04-00-41.897332.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T04_00_41.897332 path: - '**/details_harness|gsm8k|5_2023-10-19T04-00-41.897332.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T04-00-41.897332.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hellaswag|10_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T15:33:41.626742.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T15:33:41.626742.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-18T15:33:41.626742.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T15:33:41.626742.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_18T15_33_41.626742 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-management|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-18T15:33:41.626742.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-18T15:33:41.626742.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_18T15_33_41.626742 path: - '**/details_harness|truthfulqa:mc|0_2023-07-18T15:33:41.626742.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-18T15:33:41.626742.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T04_00_41.897332 path: - '**/details_harness|winogrande|5_2023-10-19T04-00-41.897332.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T04-00-41.897332.parquet' - config_name: results data_files: - split: 2023_07_18T15_33_41.626742 path: - results_2023-07-18T15:33:41.626742.parquet - split: 2023_10_19T04_00_41.897332 path: - results_2023-10-19T04-00-41.897332.parquet - split: latest path: - results_2023-10-19T04-00-41.897332.parquet --- # Dataset Card for Evaluation run of NousResearch/Nous-Hermes-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/NousResearch/Nous-Hermes-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [NousResearch/Nous-Hermes-13b](https://huggingface.co/NousResearch/Nous-Hermes-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NousResearch__Nous-Hermes-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T04:00:41.897332](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-13b/blob/main/results_2023-10-19T04-00-41.897332.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.2930998322147651, "em_stderr": 0.00466150847986569, "f1": 0.37501048657718355, "f1_stderr": 0.004576570475121802, "acc": 0.41817812997218123, "acc_stderr": 0.009868526609981134 }, "harness|drop|3": { "em": 0.2930998322147651, "em_stderr": 0.00466150847986569, "f1": 0.37501048657718355, "f1_stderr": 0.004576570475121802 }, "harness|gsm8k|5": { "acc": 0.08339651250947688, "acc_stderr": 0.00761565027710669 }, "harness|winogrande|5": { "acc": 0.7529597474348856, "acc_stderr": 0.012121402942855576 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
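As a complement to the loading snippet above, here is a minimal sketch of pulling only the aggregated metrics. It uses just the config and split names declared in the YAML above (`results`, `latest`, and the `harness_*` task configs); the exact column layout is whatever the evaluation harness wrote, so the code inspects it rather than assuming it:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_NousResearch__Nous-Hermes-13b"

# One config per evaluated task, plus the aggregated "results" config.
print(get_dataset_config_names(repo))

# "latest" always points at the most recent run, as described above.
results = load_dataset(repo, "results", split="latest")

# The schema is harness-generated, so inspect it instead of hard-coding column names.
print(results.column_names)
print(results[0])
```

Swapping `"results"` for any `harness_*` config name listed above returns the per-example details for that task instead.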
[ -0.42590227723121643, -0.6028454303741455, 0.283868670463562, 0.1803985983133316, -0.11027763783931732, 0.09422949701547623, -0.34591323137283325, -0.16740943491458893, 0.4809880554676056, 0.6134668588638306, -0.7962977886199951, -0.992957353591919, -0.6175059676170349, 0.24695372581481934, -0.22661545872688293, 1.1894315481185913, -0.23051902651786804, -0.1951441615819931, 0.046304814517498016, -0.36556681990623474, -0.19592097401618958, -0.367063045501709, -0.5142433643341064, -0.35171425342559814, 0.41471779346466064, 0.6839582920074463, 0.2785749137401581, 0.7494077086448669, 0.6754885911941528, 0.3485991656780243, -0.13339078426361084, 0.2360776960849762, -0.46872833371162415, 0.002077824668958783, 0.18632642924785614, -0.5782436728477478, -0.8316364884376526, 0.1830245852470398, 0.6841403841972351, 0.4618927240371704, -0.17826636135578156, 0.6400023102760315, 0.119077667593956, 0.5518400073051453, -0.45858103036880493, 0.29679062962532043, -0.3675726354122162, -0.12529751658439636, -0.4096042513847351, -0.29012155532836914, -0.037873584777116776, -0.32709676027297974, -0.11371983587741852, -0.6452164649963379, 0.2825533449649811, 0.09498602896928787, 1.1131482124328613, 0.14033390581607819, -0.21646222472190857, -0.2575737535953522, -0.3076801002025604, 1.0014007091522217, -0.9029775261878967, 0.0006346894660964608, 0.633584201335907, 0.07511641085147858, -0.3144010901451111, -0.5360825061798096, -0.34709927439689636, -0.17551682889461517, -0.21261504292488098, 0.17851705849170685, 0.07068829983472824, -0.16008588671684265, 0.4327700734138489, 0.7033242583274841, -0.7169322967529297, -0.013845727778971195, -0.5576266050338745, -0.05343553051352501, 1.0267786979675293, 0.42802444100379944, 0.09879472106695175, -0.5057526230812073, -0.3717862069606781, -0.345255970954895, -0.4074852466583252, 0.16304968297481537, 0.4793865978717804, 0.536530077457428, -0.7497807741165161, 0.7633723020553589, -0.48183178901672363, 0.5282427668571472, -0.0352085717022419, -0.2796144187450409, 0.8841659426689148, -0.5281056761741638, -0.19668467342853546, -0.03816823661327362, 1.037640929222107, 0.4165687561035156, -0.036082688719034195, 0.1451471447944641, -0.3259051442146301, 0.0016774516552686691, 0.13348418474197388, -0.7623599171638489, -0.10407635569572449, 0.4154219627380371, -0.5842947363853455, -0.46272099018096924, 0.2337835282087326, -0.9421592354774475, -0.2230868637561798, -0.26099053025245667, 0.16524934768676758, -0.24295198917388916, -0.33815476298332214, -0.028892239555716515, -0.10330629348754883, 0.29509735107421875, 0.15470051765441895, -0.6130828857421875, 0.47760945558547974, 0.5403546690940857, 0.9813209772109985, -0.08382326364517212, -0.32863324880599976, -0.3774860203266144, -0.2575967609882355, -0.2134881466627121, 0.45330244302749634, -0.2586367428302765, -0.4749091565608978, -0.22989940643310547, 0.3685804009437561, -0.3710844814777374, -0.605016827583313, 0.8645603656768799, -0.22250911593437195, 0.223122701048851, -0.23575842380523682, -0.49953263998031616, -0.16933110356330872, 0.3643184006214142, -0.7023792266845703, 1.5133001804351807, 0.25408098101615906, -0.8702680468559265, 0.02045341022312641, -0.9329044222831726, -0.26443955302238464, 0.09538452327251434, -0.07750081270933151, -0.5908417105674744, -0.08746366202831268, 0.16489575803279877, 0.573835551738739, -0.31025341153144836, -0.03415414318442345, -0.3449946343898773, -0.41007208824157715, 0.13288792967796326, 0.000631831178907305, 1.0266369581222534, 0.17377257347106934, -0.5224421620368958, 
0.09368115663528442, -1.0381075143814087, 0.024365421384572983, 0.4237266182899475, -0.538559079170227, -0.21081151068210602, -0.24182380735874176, 0.1794864386320114, 0.12579534947872162, 0.5979270339012146, -0.6914090514183044, 0.383562296628952, -0.20480619370937347, 0.2621685564517975, 0.8963386416435242, -0.04149840399622917, 0.40482938289642334, -0.4732445478439331, 0.5377424955368042, -0.11485016345977783, 0.2965943515300751, 0.07495589554309845, -0.5436683297157288, -0.7833526730537415, -0.16507560014724731, 0.09426048398017883, 0.7075905799865723, -0.49792155623435974, 0.6772255301475525, -0.3510403037071228, -0.7808665037155151, -0.6987228989601135, 0.09913749247789383, 0.38209614157676697, 0.5581727623939514, 0.4130837321281433, -0.27486374974250793, -0.7524740099906921, -0.9622651934623718, 0.06653182208538055, -0.16710571944713593, -0.02160397544503212, 0.6727765202522278, 1.0167073011398315, -0.3267943263053894, 0.5651918649673462, -0.6952701210975647, -0.35674235224723816, -0.31367799639701843, 0.07596757262945175, 0.7994592189788818, 0.49124783277511597, 0.5250231027603149, -0.5654782056808472, -0.2878583073616028, -0.026821628212928772, -0.8299221992492676, -0.22876548767089844, -0.18858551979064941, -0.2237420678138733, 0.34903162717819214, -0.048353489488363266, -0.4613155424594879, 0.4783652126789093, 0.5765095353126526, -0.5193731784820557, 0.6639111042022705, -0.09275602549314499, 0.40248391032218933, -1.1453560590744019, 0.24835063517093658, 0.14841017127037048, 0.07967876642942429, -0.45111319422721863, -0.11759345233440399, 0.06861969083547592, 0.31098735332489014, -0.32951441407203674, 0.7090322971343994, -0.4000434875488281, -0.1621961146593094, -0.00788554921746254, 0.2498820424079895, -0.0760982409119606, 0.4809297025203705, -0.22240577638149261, 0.7319189310073853, 0.5911901593208313, -0.4427300691604614, 0.365986168384552, 0.4989642798900604, -0.5203847885131836, 0.3042832016944885, -0.4427204728126526, 0.019513608887791634, 0.14590929448604584, 0.16299773752689362, -0.9325039982795715, -0.2900472581386566, 0.379385381937027, -0.6138981580734253, 0.19996175169944763, -0.2472786158323288, -0.5132646560668945, -0.4563285708427429, -0.5407657027244568, 0.18777424097061157, 0.34718480706214905, -0.4571150243282318, 0.25666260719299316, 0.3282931447029114, 0.04882059246301651, -0.6715766787528992, -0.662586510181427, -0.11404338479042053, -0.3200271725654602, -0.6894605159759521, 0.4099293351173401, -0.17366957664489746, -0.18466691672801971, -0.022852128371596336, -0.18980146944522858, -0.01738143526017666, 0.20676827430725098, 0.33449187874794006, 0.6505200266838074, -0.17233121395111084, -0.38772547245025635, -0.2863014340400696, -0.21004799008369446, 0.0666896402835846, 0.09868717193603516, 0.5476421117782593, -0.22200994193553925, -0.22491593658924103, -0.30281102657318115, 0.0827476754784584, 0.5431274175643921, -0.21713416278362274, 0.8280606269836426, 0.6442659497261047, -0.22168982028961182, -0.0470394492149353, -0.44847893714904785, -0.12412892282009125, -0.4694511890411377, 0.23817762732505798, -0.26348578929901123, -0.8751145005226135, 0.8150033354759216, 0.23995129764080048, 0.22402532398700714, 0.7028319239616394, 0.5201815366744995, 0.05336366221308708, 0.70292729139328, 0.28879278898239136, -0.1250034123659134, 0.5536360144615173, -0.8171951770782471, 0.032890498638153076, -1.134234070777893, -0.4104636311531067, -0.5173700451850891, -0.47448399662971497, -0.918562114238739, -0.36133280396461487, 0.19039449095726013, 0.10080883651971817, 
-0.3746482729911804, 0.5819962620735168, -0.6298956274986267, 0.1601843535900116, 0.716407299041748, 0.14414829015731812, 0.025014905259013176, -0.061113521456718445, -0.11834652721881866, 0.17176969349384308, -0.42236676812171936, -0.4951811730861664, 1.451895833015442, 0.17715245485305786, 0.6606112122535706, -0.05945926159620285, 1.0249519348144531, 0.196587935090065, 0.23455211520195007, -0.47887659072875977, 0.6703306436538696, -0.07081974297761917, -0.6217520236968994, -0.2000708132982254, -0.6350756287574768, -1.0000624656677246, 0.14906282722949982, -0.1318831741809845, -0.9930339455604553, 0.10828911513090134, -0.13743038475513458, -0.18117797374725342, 0.41491785645484924, -0.4252845048904419, 0.8487451672554016, -0.2666526436805725, -0.396159291267395, 0.06818658113479614, -0.8449335694313049, 0.3501119613647461, 0.11719648540019989, 0.36513790488243103, -0.3099384903907776, -0.015440271236002445, 1.1574649810791016, -0.4386109411716461, 0.6358497738838196, -0.13476750254631042, 0.1185210570693016, 0.28515389561653137, -0.26939496397972107, 0.5708417892456055, -0.06068309396505356, -0.2981107831001282, 0.45987391471862793, -0.1363344043493271, -0.3200773596763611, -0.2661604881286621, 0.9448865056037903, -0.8756426572799683, -0.28643178939819336, -0.44363272190093994, -0.5751768350601196, 0.3131961226463318, 0.25060826539993286, 0.3225838840007782, 0.3289649784564972, 0.019935892894864082, 0.2657357156276703, 0.3399207890033722, -0.30545756220817566, 0.49232539534568787, 0.5513452291488647, -0.11389923840761185, -0.6782161593437195, 0.7522286176681519, 0.2700671851634979, -0.016035132110118866, 0.22699354588985443, -0.015694594010710716, -0.5336719155311584, -0.4193650186061859, -0.4046948552131653, 0.41726991534233093, -0.3951432704925537, -0.30774879455566406, -0.4401550590991974, -0.23844967782497406, -0.454032301902771, -0.03251587226986885, -0.4644514322280884, -0.4691929817199707, -0.43738117814064026, -0.3119920790195465, 0.6312844157218933, 0.6553760170936584, -0.41056665778160095, 0.2770363688468933, -0.74754798412323, 0.28869208693504333, -0.2744796872138977, 0.490726500749588, -0.13877402245998383, -0.5125950574874878, -0.44187185168266296, 0.04508152976632118, -0.4777420163154602, -0.9522838592529297, 0.5636829137802124, -0.09264768660068512, 0.7173197865486145, 0.2317384034395218, 0.216101735830307, 0.6830880641937256, -0.19892902672290802, 0.994488000869751, -0.09443517029285431, -0.6892616748809814, 0.7478762269020081, -0.3889293968677521, 0.05895664542913437, 0.5151563882827759, 0.16516977548599243, -0.5940541625022888, -0.30128949880599976, -0.9885621666908264, -1.2080438137054443, 1.1997699737548828, 0.6625992655754089, -0.32069286704063416, 0.05248995125293732, 0.3463447690010071, -0.15993404388427734, 0.16335998475551605, -0.6123375296592712, -0.9125999212265015, -0.008336077444255352, -0.22408325970172882, -0.06503701210021973, -0.07446960359811783, -0.37091749906539917, -0.3702954649925232, 0.9695625305175781, 0.06948783993721008, 0.4258119463920593, 0.23484276235103607, 0.0376492440700531, -0.08685404807329178, 0.29928141832351685, 0.5075286030769348, 0.6215640306472778, -0.4676125943660736, -0.09951514005661011, 0.324430912733078, -0.5544788837432861, 0.05284663662314415, 0.36853012442588806, 0.011793841607868671, -0.0704071894288063, 0.5401947498321533, 0.8763002753257751, 0.0558176152408123, -0.41244077682495117, 0.5060011744499207, -0.010476701892912388, -0.3632049262523651, -0.4645599126815796, 0.06118658185005188, -0.0435299426317215, 
0.3747233748435974, 0.47002825140953064, -0.12436933815479279, 0.07668916136026382, -0.17017491161823273, 0.32455939054489136, 0.253691166639328, -0.08748405426740646, -0.27260589599609375, 0.6683867573738098, -0.04442163184285164, -0.30257973074913025, 0.7422919869422913, -0.1856127828359604, -0.47119140625, 1.0931519269943237, 0.2702440321445465, 0.8738957047462463, -0.2630387842655182, 0.14202527701854706, 0.6240675449371338, 0.4040509760379791, -0.11210756748914719, 0.6279804110527039, 0.07160046696662903, -0.6133136749267578, -0.325334757566452, -0.7847174406051636, -0.22527039051055908, 0.46738359332084656, -1.0767203569412231, 0.4055071771144867, -0.1707489788532257, -0.20238113403320312, -0.1367979198694229, 0.49375051259994507, -0.7916309237480164, 0.20185048878192902, 0.10285613685846329, 0.9110078811645508, -1.0162702798843384, 0.635499119758606, 0.939537525177002, -0.425483375787735, -0.893310010433197, -0.3239442706108093, 0.11034363508224487, -0.8176222443580627, 0.4125041961669922, 0.3188112676143646, 0.3410760462284088, -0.2516251504421234, -0.6319155097007751, -1.0905735492706299, 1.4665831327438354, 0.214095339179039, -0.5740223526954651, 0.19734688103199005, 0.10258900374174118, 0.3494897484779358, -0.32703927159309387, 0.5693068504333496, 0.7426831126213074, 0.6841806173324585, -0.006184665486216545, -0.9406579732894897, 0.3321901857852936, -0.5251243114471436, -0.17044128477573395, 0.3312225341796875, -0.7928820848464966, 1.0259777307510376, -0.1453043818473816, -0.08848726749420166, 0.10354837775230408, 0.31280580163002014, 0.5922532081604004, 0.4198581576347351, 0.3837202191352844, 0.8230843544006348, 0.7257944345474243, -0.3425302505493164, 1.068902611732483, -0.41053086519241333, 0.9279893040657043, 1.0724565982818604, 0.007581802085042, 0.7172114253044128, 0.32821720838546753, -0.36127132177352905, 0.5712454319000244, 0.8619802594184875, -0.38609352707862854, 0.41380926966667175, 0.15364311635494232, -0.07865991443395615, -0.07038950175046921, 0.02117283083498478, -0.47363778948783875, 0.3428937494754791, 0.22135770320892334, -0.5778598785400391, -0.2640504837036133, -0.4023303687572479, 0.14071863889694214, -0.2850123941898346, -0.1799902468919754, 0.5650166273117065, -0.02452825754880905, -0.3675801753997803, 0.7059333920478821, 0.012519946321845055, 0.6889860033988953, -0.6032665371894836, -0.10817356407642365, -0.2908197343349457, 0.28909263014793396, -0.587630569934845, -0.9407249689102173, 0.21137472987174988, 0.07191996276378632, -0.20707526803016663, -0.24853861331939697, 0.5451875329017639, -0.21935085952281952, -0.581877589225769, 0.5478913187980652, 0.4392707049846649, 0.37696972489356995, 0.08378193527460098, -0.9376185536384583, 0.14286331832408905, 0.29327642917633057, -0.7670119404792786, 0.3416765332221985, 0.3157488703727722, 0.1158517450094223, 0.5381359457969666, 0.7509056329727173, 0.08345550298690796, 0.08672790974378586, -0.09391951560974121, 1.0651140213012695, -0.8110096454620361, -0.33669906854629517, -0.8377062082290649, 0.8450164794921875, -0.2654685378074646, -0.603448212146759, 0.9276004433631897, 1.037461757659912, 0.87923663854599, 0.16424743831157684, 0.8005298376083374, -0.4245704412460327, 0.5626137852668762, -0.21591593325138092, 0.808478832244873, -0.8699719309806824, 0.30482539534568787, -0.2540279030799866, -0.8471828699111938, -0.11208520084619522, 0.8085988759994507, -0.319367378950119, -0.07251870632171631, 0.5360435843467712, 1.0087207555770874, 0.01194373331964016, 0.16898998618125916, -0.033368464559316635, 
0.42971453070640564, 0.2190166562795639, 0.5825316309928894, 0.6104546785354614, -0.6948135495185852, 0.47703543305397034, -0.5934349298477173, -0.4374360144138336, -0.17742925882339478, -0.6091082096099854, -0.7636687755584717, -0.4977749288082123, -0.3094477951526642, -0.540679395198822, -0.0566459596157074, 0.9526889324188232, 0.49321115016937256, -0.8780807852745056, -0.5560075044631958, 0.0064315516501665115, 0.18406911194324493, -0.20407554507255554, -0.3534969985485077, 0.5356888175010681, -0.14213785529136658, -0.8272087574005127, 0.44526124000549316, -0.04094056785106659, -0.1647491604089737, 0.03150562196969986, -0.22870871424674988, -0.38529732823371887, -0.23469975590705872, 0.35995349287986755, 0.15251466631889343, -0.6195776462554932, -0.17830321192741394, 0.033177703619003296, -0.05524498596787453, 0.33211246132850647, 0.3359246253967285, -0.5554500818252563, 0.020047221332788467, 0.5233756899833679, 0.260911762714386, 0.7666218280792236, -0.0433833934366703, 0.213795006275177, -0.7375066876411438, -0.0784805491566658, -0.00487141590565443, 0.5334011912345886, 0.25005173683166504, -0.4291074275970459, 1.021201252937317, 0.3137236535549164, -0.7477744221687317, -0.9135984182357788, -0.18036851286888123, -1.2141120433807373, 0.02690214104950428, 1.3699346780776978, -0.25516968965530396, -0.33243486285209656, 0.09431798756122589, -0.2023918777704239, 0.34230223298072815, -0.8091800808906555, 0.5725234746932983, 0.616898775100708, -0.39994850754737854, 0.013633228838443756, -0.7181245684623718, 0.2972058653831482, 0.10167443752288818, -0.8564333319664001, -0.06762496381998062, 0.2688567042350769, 0.45519933104515076, 0.3142295777797699, 0.6237152814865112, -0.05426657199859619, -0.17786666750907898, -0.08300581574440002, 0.26217472553253174, -0.06956133246421814, -0.0871666669845581, -0.2371882051229477, 0.08974486589431763, -0.40639176964759827, -0.5843613743782043 ]
open-llm-leaderboard/details_bigscience__bloom-560m
open-llm-leaderboard
2023-12-04T13:05:09Z
200
0
[ "region:us" ]
null
2023-08-18T00:14:29Z
--- pretty_name: Evaluation run of bigscience/bloom-560m dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [bigscience/bloom-560m](https://huggingface.co/bigscience/bloom-560m) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 13 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bigscience__bloom-560m\"\ ,\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese\ \ are the [latest results from run 2023-12-04T13:05:03.033636](https://huggingface.co/datasets/open-llm-leaderboard/details_bigscience__bloom-560m/blob/main/results_2023-12-04T13-05-03.033636.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.003032600454890068,\n\ \ \"acc_stderr\": 0.0015145735612245468\n },\n \"harness|gsm8k|5\"\ : {\n \"acc\": 0.003032600454890068,\n \"acc_stderr\": 0.0015145735612245468\n\ \ }\n}\n```" repo_url: https://huggingface.co/bigscience/bloom-560m leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|arc:challenge|25_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-09T09:50:46.994927.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_17T01_44_51.787860 path: - '**/details_harness|drop|3_2023-10-17T01-44-51.787860.parquet' - split: 2023_10_19T07_58_25.532907 path: - '**/details_harness|drop|3_2023-10-19T07-58-25.532907.parquet' - split: 2023_10_19T11_57_26.532188 path: - '**/details_harness|drop|3_2023-10-19T11-57-26.532188.parquet' - split: 2023_10_19T13_58_30.472160 path: - '**/details_harness|drop|3_2023-10-19T13-58-30.472160.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T13-58-30.472160.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_17T01_44_51.787860 path: - '**/details_harness|gsm8k|5_2023-10-17T01-44-51.787860.parquet' - split: 2023_10_19T07_58_25.532907 path: - '**/details_harness|gsm8k|5_2023-10-19T07-58-25.532907.parquet' - split: 2023_10_19T11_57_26.532188 path: - '**/details_harness|gsm8k|5_2023-10-19T11-57-26.532188.parquet' - split: 2023_10_19T13_58_30.472160 path: - '**/details_harness|gsm8k|5_2023-10-19T13-58-30.472160.parquet' - split: 2023_12_03T15_01_55.935382 path: - '**/details_harness|gsm8k|5_2023-12-03T15-01-55.935382.parquet' - split: 2023_12_03T15_02_09.067243 path: - '**/details_harness|gsm8k|5_2023-12-03T15-02-09.067243.parquet' - split: 2023_12_03T16_04_42.088670 path: - 
'**/details_harness|gsm8k|5_2023-12-03T16-04-42.088670.parquet' - split: 2023_12_03T16_05_29.861058 path: - '**/details_harness|gsm8k|5_2023-12-03T16-05-29.861058.parquet' - split: 2023_12_04T09_54_26.106896 path: - '**/details_harness|gsm8k|5_2023-12-04T09-54-26.106896.parquet' - split: 2023_12_04T09_54_41.464190 path: - '**/details_harness|gsm8k|5_2023-12-04T09-54-41.464190.parquet' - split: 2023_12_04T13_04_03.136528 path: - '**/details_harness|gsm8k|5_2023-12-04T13-04-03.136528.parquet' - split: 2023_12_04T13_05_03.033636 path: - '**/details_harness|gsm8k|5_2023-12-04T13-05-03.033636.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-12-04T13-05-03.033636.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hellaswag|10_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T09:50:46.994927.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T09:50:46.994927.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T09:50:46.994927.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-09T09:50:46.994927.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T09:50:46.994927.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-management|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T09:50:46.994927.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_09T09_50_46.994927 path: - '**/details_harness|truthfulqa:mc|0_2023-08-09T09:50:46.994927.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-09T09:50:46.994927.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_17T01_44_51.787860 path: - '**/details_harness|winogrande|5_2023-10-17T01-44-51.787860.parquet' - split: 2023_10_19T07_58_25.532907 path: - '**/details_harness|winogrande|5_2023-10-19T07-58-25.532907.parquet' - split: 2023_10_19T11_57_26.532188 path: - '**/details_harness|winogrande|5_2023-10-19T11-57-26.532188.parquet' - split: 2023_10_19T13_58_30.472160 path: - '**/details_harness|winogrande|5_2023-10-19T13-58-30.472160.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T13-58-30.472160.parquet' - config_name: results data_files: - split: 2023_08_09T09_50_46.994927 path: - results_2023-08-09T09:50:46.994927.parquet - split: 2023_10_17T01_44_51.787860 path: - results_2023-10-17T01-44-51.787860.parquet - split: 2023_10_19T07_58_25.532907 path: - results_2023-10-19T07-58-25.532907.parquet - split: 2023_10_19T11_57_26.532188 path: - results_2023-10-19T11-57-26.532188.parquet - split: 2023_10_19T13_58_30.472160 path: - results_2023-10-19T13-58-30.472160.parquet - split: 2023_12_03T15_01_55.935382 path: - results_2023-12-03T15-01-55.935382.parquet - split: 2023_12_03T15_02_09.067243 path: - results_2023-12-03T15-02-09.067243.parquet - split: 2023_12_03T16_04_42.088670 path: - results_2023-12-03T16-04-42.088670.parquet - split: 2023_12_03T16_05_29.861058 path: - results_2023-12-03T16-05-29.861058.parquet - split: 2023_12_04T09_54_26.106896 path: - results_2023-12-04T09-54-26.106896.parquet - split: 2023_12_04T09_54_41.464190 path: - results_2023-12-04T09-54-41.464190.parquet - split: 2023_12_04T13_04_03.136528 path: - results_2023-12-04T13-04-03.136528.parquet - 
split: 2023_12_04T13_05_03.033636 path: - results_2023-12-04T13-05-03.033636.parquet - split: latest path: - results_2023-12-04T13-05-03.033636.parquet
---

# Dataset Card for Evaluation run of bigscience/bloom-560m

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/bigscience/bloom-560m
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [bigscience/bloom-560m](https://huggingface.co/bigscience/bloom-560m) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 13 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_bigscience__bloom-560m",
	"harness_gsm8k_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-12-04T13:05:03.033636](https://huggingface.co/datasets/open-llm-leaderboard/details_bigscience__bloom-560m/blob/main/results_2023-12-04T13-05-03.033636.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.003032600454890068,
        "acc_stderr": 0.0015145735612245468
    },
    "harness|gsm8k|5": {
        "acc": 0.003032600454890068,
        "acc_stderr": 0.0015145735612245468
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
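As a complement to the loading snippet in the Dataset Summary above, the aggregated scores can also be read directly from the "results" configuration declared in this card's YAML header. The sketch below is illustrative only and assumes a reasonably recent `datasets` release; the repository, configuration, and split names are the ones listed in the header.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_bigscience__bloom-560m"

# Every evaluated task has its own configuration; "results" holds the aggregated scores.
configs = get_dataset_config_names(repo)
print(len(configs), "configurations available")

# "latest" always points at the most recent run; the timestamped splits listed in the
# YAML header (e.g. 2023_12_04T13_05_03.033636) keep the earlier runs accessible.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```

The same pattern applies to any of the per-task configurations by swapping "results" for the configuration name of interest.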
open-llm-leaderboard/details_TFLai__gpt2-turkish-uncased
open-llm-leaderboard
2023-12-02T15:29:53Z
200
0
[ "region:us" ]
null
2023-08-18T00:18:04Z
--- pretty_name: Evaluation run of TFLai/gpt2-turkish-uncased dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TFLai/gpt2-turkish-uncased](https://huggingface.co/TFLai/gpt2-turkish-uncased)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TFLai__gpt2-turkish-uncased\"\ ,\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese\ \ are the [latest results from run 2023-12-02T15:29:40.186292](https://huggingface.co/datasets/open-llm-leaderboard/details_TFLai__gpt2-turkish-uncased/blob/main/results_2023-12-02T15-29-40.186292.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.0,\n \"\ acc_stderr\": 0.0\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \ \ \"acc_stderr\": 0.0\n }\n}\n```" repo_url: https://huggingface.co/TFLai/gpt2-turkish-uncased leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|arc:challenge|25_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T09:48:46.264649.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T01_34_05.823968 path: - '**/details_harness|drop|3_2023-10-22T01-34-05.823968.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T01-34-05.823968.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T01_34_05.823968 path: - '**/details_harness|gsm8k|5_2023-10-22T01-34-05.823968.parquet' - split: 2023_12_02T15_29_40.186292 path: - '**/details_harness|gsm8k|5_2023-12-02T15-29-40.186292.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-12-02T15-29-40.186292.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hellaswag|10_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:48:46.264649.parquet' - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:48:46.264649.parquet' - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:48:46.264649.parquet' - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T09:48:46.264649.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:48:46.264649.parquet' - config_name: 
harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - 
'**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 
2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 
2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T09:48:46.264649.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T09:48:46.264649.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T09_48_46.264649 path: - 
'**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:48:46.264649.parquet'
- split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:48:46.264649.parquet'
- config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T09_48_46.264649
path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T09:48:46.264649.parquet' -
split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T09:48:46.264649.parquet'
- config_name: harness_winogrande_5 data_files: - split: 2023_10_22T01_34_05.823968
path: - '**/details_harness|winogrande|5_2023-10-22T01-34-05.823968.parquet' - split:
latest path: - '**/details_harness|winogrande|5_2023-10-22T01-34-05.823968.parquet'
- config_name: results data_files: - split: 2023_07_24T09_48_46.264649 path: - results_2023-07-24T09:48:46.264649.parquet
- split: 2023_10_22T01_34_05.823968 path: - results_2023-10-22T01-34-05.823968.parquet
- split: 2023_12_02T15_29_40.186292 path: - results_2023-12-02T15-29-40.186292.parquet
- split: latest path: - results_2023-12-02T15-29-40.186292.parquet --- # Dataset
Card for Evaluation run of TFLai/gpt2-turkish-uncased ## Dataset Description - **Homepage:**
- **Repository:** https://huggingface.co/TFLai/gpt2-turkish-uncased - **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically
created during the evaluation run of model [TFLai/gpt2-turkish-uncased](https://huggingface.co/TFLai/gpt2-turkish-uncased)
on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the
evaluated tasks. The dataset has been created from 3 run(s). Each run can be found
as a specific split in each configuration, the split being named using the timestamp
of the run. The "train" split is always pointing to the latest results. An additional
configuration "results" stores all the aggregated results of the run (and is used
to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following: ```python
from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TFLai__gpt2-turkish-uncased",
"harness_gsm8k_5", split="train") ``` A complementary sketch for loading the aggregated
"results" configuration is given after this card. ## Latest results These are the
[latest results from run 2023-12-02T15:29:40.186292](https://huggingface.co/datasets/open-llm-leaderboard/details_TFLai__gpt2-turkish-uncased/blob/main/results_2023-12-02T15-29-40.186292.json)
(note that there might be results for other tasks in the repo if successive evals
didn't cover the same tasks. You can find each in the results and the "latest" split
for each eval): ```python { "all": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|gsm8k|5":
{ "acc": 0.0, "acc_stderr": 0.0 } } ``` ### Supported Tasks and Leaderboards [More
Information Needed] ### Languages [More Information Needed] ## Dataset Structure
### Data Instances [More Information Needed] ### Data Fields [More Information Needed]
### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale
[More Information Needed] ### Source Data #### Initial Data Collection and Normalization
[More Information Needed] #### Who are the source language producers? [More Information
Needed] ### Annotations #### Annotation process [More Information Needed] #### Who
are the annotators?
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
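The card above pulls the per-task details with `load_dataset`; the same library can also
enumerate the available configurations and read the aggregated run-level metrics. Below is
a minimal sketch, assuming the `results` configuration and its `latest` split declared in
the YAML metadata above are published on the Hub for this repository; the repository id is
taken from the card, and the printed fields are purely illustrative.

```python
# Minimal sketch (assumption: the "results" config and "latest" split declared
# in the YAML metadata above are available on the Hub for this repository).
from datasets import get_dataset_config_names, load_dataset

repo_id = "open-llm-leaderboard/details_TFLai__gpt2-turkish-uncased"

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo_id)
print(f"{len(configs)} configurations available")

# The "latest" split of the "results" configuration points at the most recent run.
results = load_dataset(repo_id, "results", split="latest")
print(results[0])
```

The same pattern applies to every `details_*` card in this dump; only the repository id and
the chosen configuration name change.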
[ -0.304031103849411, -0.6230329275131226, 0.1553722769021988, 0.19578944146633148, -0.35118407011032104, 0.07012055814266205, -0.4401276409626007, -0.21263079345226288, 0.22257930040359497, 0.54567551612854, -0.5779430866241455, -0.9191017150878906, -0.8142338395118713, 0.0666385367512703, -0.09968342632055283, 1.303006887435913, -0.44223207235336304, -0.11302265524864197, 0.10739707946777344, -0.28942441940307617, -0.31941115856170654, -0.28265345096588135, -0.6137812733650208, -0.4727132320404053, 0.2925195097923279, 0.6465671062469482, 0.3490789830684662, 0.5882730484008789, 0.5855370163917542, 0.3162042796611786, 0.049503784626722336, 0.16541540622711182, -0.48971378803253174, -0.14906390011310577, 0.12680144608020782, -0.44005462527275085, -0.6520553827285767, 0.14389251172542572, 0.8686228394508362, 0.34545257687568665, -0.09589158743619919, 0.6269257068634033, 0.14091193675994873, 0.5909792184829712, -0.4569801688194275, 0.5942454934120178, -0.18034571409225464, 0.08278681337833405, -0.4682101309299469, -0.1502978801727295, 0.055468857288360596, -0.3417481482028961, -0.0948215126991272, -0.5018098950386047, 0.22665928304195404, -0.0361052080988884, 0.9585996270179749, 0.12861770391464233, -0.3000861406326294, -0.20474138855934143, -0.44858458638191223, 0.7769060134887695, -0.8223795890808105, -0.051682814955711365, 0.6162907481193542, 0.254044771194458, -0.2528564929962158, -0.6818663477897644, -0.333609014749527, 0.014530886895954609, -0.26800134778022766, 0.17034468054771423, 0.11576096713542938, -0.09528927505016327, 0.3665390908718109, 0.7089073657989502, -0.7747328877449036, -0.07471228390932083, -0.6412386894226074, -0.13222680985927582, 0.9907759428024292, 0.31321874260902405, 0.06944284588098526, -0.2538768947124481, -0.22975893318653107, -0.3496387004852295, -0.38580262660980225, 0.15912632644176483, 0.4056054949760437, 0.477780818939209, -0.6578980088233948, 0.9228342175483704, -0.32126736640930176, 0.45772168040275574, -0.10225102305412292, -0.34061187505722046, 0.7766051888465881, -0.6030109524726868, -0.2697009742259979, -0.0034846181515604258, 1.0181885957717896, 0.34907078742980957, 0.09621262550354004, 0.06662601977586746, -0.34424880146980286, -0.034233275800943375, -0.056558627635240555, -0.8333102464675903, -0.12829191982746124, 0.48462486267089844, -0.440565824508667, -0.45993348956108093, 0.3180050253868103, -0.8921511173248291, -0.2385525107383728, -0.16522493958473206, 0.2871853709220886, -0.07463837414979935, -0.5462456345558167, -0.19004996120929718, -0.12119777500629425, 0.40853118896484375, 0.10281171649694443, -0.7166707515716553, 0.3975869119167328, 0.8055964112281799, 1.0037837028503418, -0.17121180891990662, -0.35735923051834106, -0.47414225339889526, -0.1278066784143448, -0.16833022236824036, 0.48165276646614075, -0.2911438047885895, -0.4215814769268036, -0.017742738127708435, 0.2783504128456116, -0.33328479528427124, -0.5258767008781433, 0.7566897869110107, -0.2685447931289673, 0.3990417718887329, -0.37099623680114746, -0.533612847328186, -0.10277511924505234, 0.3300548195838928, -0.6598825454711914, 1.3738306760787964, 0.40163448452949524, -0.930491030216217, 0.24493741989135742, -1.0086658000946045, -0.3033333420753479, 0.020062634721398354, -0.09605932980775833, -0.6372528076171875, -0.14423547685146332, 0.20301644504070282, 0.5108479261398315, -0.3025449216365814, 0.19835886359214783, -0.2284485101699829, -0.5378671884536743, -0.04773450270295143, -0.025179382413625717, 1.019513487815857, 0.2424018830060959, -0.4755387604236603, 
0.16559311747550964, -0.6968045234680176, -0.02462543174624443, 0.33877435326576233, -0.5362032055854797, 0.03367919102311134, -0.23447242379188538, 0.34540507197380066, 0.14262470602989197, 0.4084438383579254, -0.6360419988632202, 0.3484989106655121, -0.16389714181423187, 0.360217422246933, 0.8798885345458984, 0.08550827205181122, 0.24269656836986542, -0.5176209807395935, 0.48724883794784546, 0.11366702616214752, 0.26020488142967224, 0.29590049386024475, -0.6370660066604614, -0.7879010438919067, -0.11412278562784195, 0.14628949761390686, 0.705447256565094, -0.5693286657333374, 0.6014643311500549, -0.5219820141792297, -0.5934252142906189, -0.6146148443222046, 0.16034898161888123, 0.35509151220321655, 0.5374290347099304, 0.28756487369537354, -0.22864975035190582, -0.6710028052330017, -0.8502886295318604, -0.015277964994311333, -0.2650834918022156, -0.006404264830052853, 0.4172495901584625, 1.0036569833755493, -0.1904464215040207, 0.592093288898468, -0.5642128586769104, -0.551732063293457, -0.3515491783618927, 0.0647905021905899, 0.7181806564331055, 0.5562737584114075, 0.4264944791793823, -0.6526142358779907, -0.4185795187950134, -0.14598654210567474, -0.8187924027442932, -0.14776133000850677, -0.05005709454417229, -0.28902196884155273, 0.43293094635009766, -0.15688709914684296, -0.5371540784835815, 0.49575889110565186, 0.4420372247695923, -0.6925826072692871, 0.5515130162239075, 0.06937989592552185, 0.3948664963245392, -1.2560909986495972, 0.23512016236782074, 0.13276565074920654, 0.08209148049354553, -0.41088587045669556, -0.08330155164003372, -0.17660027742385864, 0.26081106066703796, -0.43493345379829407, 0.7008026242256165, -0.32227393984794617, -0.2406509965658188, 0.038781676441431046, 0.0371406115591526, -0.07287545502185822, 0.5795774459838867, -0.16027633845806122, 0.9117004871368408, 0.4731670916080475, -0.28053009510040283, 0.4620284140110016, 0.4771765470504761, -0.589314341545105, 0.18765945732593536, -0.5886591076850891, 0.08824758976697922, 0.18285085260868073, 0.21080008149147034, -0.9196276068687439, -0.449181467294693, 0.6147885322570801, -0.4789647161960602, 0.3884147107601166, -0.31884726881980896, -0.7614923119544983, -0.3619518578052521, -0.5506479740142822, 0.3452095687389374, 0.5247823596000671, -0.45745912194252014, 0.31358271837234497, 0.4678000509738922, 0.1018209233880043, -0.633569061756134, -0.8386614322662354, -0.13477976620197296, -0.4358147978782654, -0.6927695274353027, 0.27068567276000977, -0.11716775596141815, -0.2026088535785675, 0.013635022565722466, -0.036967869848012924, -0.10243965685367584, 0.04814755171537399, 0.2699209749698639, 0.3610503673553467, -0.21533694863319397, -0.2408585250377655, -0.2008601278066635, -0.056543972343206406, 0.13306303322315216, -0.07758276909589767, 0.40938514471054077, -0.31761202216148376, -0.13256868720054626, -0.25849631428718567, 0.1771424412727356, 0.4075407385826111, -0.10768996179103851, 0.8731693625450134, 0.9983330965042114, -0.352336049079895, 0.1397448033094406, -0.5322709679603577, 0.0805487260222435, -0.44342976808547974, 0.3465791642665863, -0.3366398513317108, -0.8894371390342712, 0.7445423603057861, 0.3288765251636505, 0.24912993609905243, 0.754847526550293, 0.6292874813079834, 0.06074194237589836, 0.6459280252456665, 0.3024311661720276, -0.228653222322464, 0.5154745578765869, -0.7335551977157593, -0.14542458951473236, -1.089073896408081, -0.4392204284667969, -0.4238099157810211, -0.3319871723651886, -0.905170738697052, -0.38713616132736206, 0.3066769540309906, 0.11829382181167603, -0.5674336552619934, 
0.30525675415992737, -0.5517051219940186, 0.2805582880973816, 0.5157428979873657, 0.2377522885799408, 0.07856430858373642, 0.12768670916557312, -0.04947624355554581, 0.21095140278339386, -0.4414007365703583, -0.4609113037586212, 1.5131334066390991, 0.3219681680202484, 0.42958489060401917, 0.14383703470230103, 0.8843313455581665, 0.4470560848712921, 0.3534983992576599, -0.505293607711792, 0.523918092250824, -0.13166333734989166, -0.48095041513442993, -0.27703484892845154, -0.5158618092536926, -0.8467009663581848, 0.11924778670072556, -0.013384656980633736, -0.9204631447792053, -0.06762143224477768, -0.10871534049510956, -0.007503167726099491, 0.42542898654937744, -0.5646438598632812, 1.0230896472930908, -0.18623696267604828, -0.4280540943145752, 0.04329719766974449, -0.725553572177887, 0.47774916887283325, -0.019176535308361053, 0.14223812520503998, -0.24780331552028656, -0.08626008033752441, 1.198425531387329, -0.6793380975723267, 0.7622753977775574, -0.4203489124774933, 0.11644776910543442, 0.3714192807674408, -0.4169647693634033, 0.5839099884033203, 0.06501106172800064, -0.29457882046699524, 0.4702671468257904, -0.17107874155044556, -0.47304967045783997, -0.37962159514427185, 0.8587476015090942, -1.0855382680892944, -0.39604803919792175, -0.5503848791122437, -0.4678819179534912, 0.15427254140377045, 0.22729672491550446, 0.3374975919723511, 0.25011762976646423, -0.1114896684885025, 0.042185477912425995, 0.26086604595184326, 0.005538879428058863, 0.5428327918052673, 0.3670116066932678, -0.12576013803482056, -0.7405093312263489, 0.6326704621315002, 0.2199297994375229, -0.1564968377351761, 0.3132306635379791, 0.07380199432373047, -0.6004236936569214, -0.533430814743042, -0.5149233341217041, 0.3617827296257019, -0.5793923139572144, -0.1557968407869339, -0.36156749725341797, -0.26177459955215454, -0.49276676774024963, 0.19468002021312714, -0.32309672236442566, -0.49228668212890625, -0.4440411925315857, -0.28653040528297424, 0.575146496295929, 0.7083888649940491, -0.25701138377189636, 0.24343788623809814, -0.6961859464645386, 0.15821221470832825, -0.10601288080215454, 0.5504607558250427, -0.12408415228128433, -0.6432821750640869, -0.4494449198246002, 0.09720802307128906, -0.3639572262763977, -0.7639287114143372, 0.4287223517894745, 0.18506434559822083, 0.6473769545555115, 0.19436965882778168, 0.13996414840221405, 0.6539930701255798, -0.16051553189754486, 1.1152912378311157, -0.18973331153392792, -0.6209474205970764, 0.7401413321495056, -0.363334059715271, 0.17533241212368011, 0.5374893546104431, 0.11877962201833725, -0.4141847789287567, -0.13134810328483582, -0.712267279624939, -1.059152603149414, 1.0425187349319458, 0.6355749368667603, -0.2978060245513916, 0.19159111380577087, 0.3606804609298706, -0.010250662453472614, 0.11409586668014526, -0.6183370351791382, -0.623142421245575, -0.217054545879364, -0.19779613614082336, -0.23156706988811493, -0.26398319005966187, -0.5194971561431885, -0.4014859199523926, 0.9890779256820679, 0.05911343917250633, 0.5139628648757935, 0.13871817290782928, -0.045924726873636246, -0.22863703966140747, 0.25323814153671265, 0.6111019253730774, 0.7235608696937561, -0.47204965353012085, -0.18473458290100098, 0.18641382455825806, -0.6962063312530518, 0.049480147659778595, 0.3156934678554535, 0.036034636199474335, -0.15083639323711395, 0.7180972099304199, 0.9874316453933716, -0.12359953671693802, -0.27523723244667053, 0.5444209575653076, -0.006086121778935194, -0.28933680057525635, -0.6902932524681091, 0.10249613970518112, -0.1829611361026764, 0.44977620244026184, 
0.44335031509399414, -0.18194936215877533, -0.03482426702976227, -0.22000150382518768, 0.3378380537033081, 0.09030470252037048, -0.032744184136390686, -0.3058519661426544, 0.5692547559738159, 0.036687228828668594, -0.21032263338565826, 0.9120393991470337, -0.11572091281414032, -0.5015972852706909, 0.919585645198822, 0.24495474994182587, 1.0877267122268677, -0.05403055250644684, 0.22086244821548462, 0.5400397181510925, 0.2917267978191376, -0.13530784845352173, 0.6315855383872986, -0.0438811369240284, -0.6566286683082581, -0.29254209995269775, -0.6951716542243958, -0.1840713620185852, 0.4859158396720886, -0.9146219491958618, 0.32603368163108826, -0.23263266682624817, -0.18294751644134521, -0.2563325762748718, 0.6816787123680115, -0.8926262259483337, 0.10016479343175888, 0.11962909996509552, 0.8799132108688354, -1.1762722730636597, 0.7708236575126648, 0.949855625629425, -0.5476710200309753, -0.8398295640945435, -0.17107310891151428, 0.08207820355892181, -0.765434205532074, 0.422525018453598, 0.28678035736083984, 0.48586857318878174, -0.31241148710250854, -0.5228102803230286, -0.8874969482421875, 1.5838253498077393, 0.15085650980472565, -0.28243187069892883, 0.21600884199142456, 0.4561693072319031, 0.4816688895225525, -0.45104557275772095, 0.6212607622146606, 0.8694464564323425, 0.8180229067802429, 0.12547920644283295, -0.9374793767929077, 0.3025248348712921, -0.6314316987991333, -0.05047031119465828, 0.14682532846927643, -0.899501383304596, 0.8577482104301453, -0.21168720722198486, -0.021133797243237495, -0.09710037708282471, 0.4706379175186157, 0.5431202054023743, 0.41418227553367615, 0.4109685719013214, 0.7455958724021912, 0.6223183870315552, -0.3675858676433563, 1.0521659851074219, -0.14518925547599792, 0.750466525554657, 1.1339247226715088, 0.10320277512073517, 0.6889898180961609, 0.3561227023601532, -0.4067944586277008, 0.38240569829940796, 1.0495792627334595, -0.44302642345428467, 0.5517885684967041, 0.09928557276725769, -0.06992675364017487, 0.011705532670021057, -0.08555315434932709, -0.5238444209098816, 0.48500606417655945, 0.23417840898036957, -0.5334847569465637, -0.2740553617477417, -0.3348018229007721, 0.21360285580158234, -0.48328641057014465, -0.23334838449954987, 0.6473272442817688, -0.02512715756893158, -0.5686935782432556, 0.73404860496521, -0.02517513558268547, 0.5687328577041626, -0.5781893134117126, -0.17513151466846466, -0.40998971462249756, 0.18693968653678894, -0.5534289479255676, -1.0077128410339355, 0.3618265986442566, 0.13722147047519684, -0.24503035843372345, -0.09194394201040268, 0.4710601270198822, -0.45505768060684204, -0.6416261196136475, 0.3272433876991272, 0.40758216381073, 0.4694344997406006, 0.10338444262742996, -0.973505437374115, 0.27439022064208984, 0.2115861475467682, -0.7985726594924927, 0.37955161929130554, 0.2642459273338318, -0.05101911723613739, 0.4458584785461426, 0.5278153419494629, 0.1254868060350418, -0.021946746855974197, 0.06344062089920044, 1.0242918729782104, -0.6199687719345093, -0.3893393576145172, -0.7895876169204712, 0.8220151662826538, -0.19029603898525238, -0.5534617304801941, 0.6212382912635803, 0.9725080728530884, 0.8866913914680481, 0.0960959792137146, 0.8955876231193542, -0.3623427152633667, 0.36950209736824036, -0.3952644467353821, 0.9134112000465393, -0.6863203644752502, 0.34526416659355164, -0.21788868308067322, -0.9496541023254395, -0.10904539376497269, 0.5422086119651794, -0.2825254499912262, 0.12621045112609863, 0.5746787190437317, 0.8702002167701721, -0.03601032495498657, 0.15238724648952484, -0.06491312384605408, 
0.42429855465888977, 0.3597489595413208, 0.725992739200592, 0.3770420253276825, -0.7804948091506958, 0.4877215325832367, -0.6263816356658936, -0.40755411982536316, -0.04816179722547531, -0.7447168231010437, -0.9232624173164368, -0.5617279410362244, -0.42563605308532715, -0.576732337474823, 0.07345984131097794, 1.062330961227417, 0.43364477157592773, -1.0725172758102417, -0.48477068543434143, 0.05580587312579155, 0.15739168226718903, -0.15327657759189606, -0.3190964460372925, 0.754054844379425, -0.07082430273294449, -0.7572603225708008, 0.26998648047447205, -0.24344874918460846, -0.100090891122818, -0.04110928624868393, -0.2773842215538025, -0.32538163661956787, -0.44273248314857483, 0.3417530655860901, 0.23265717923641205, -0.7339855432510376, -0.34181711077690125, -0.35562995076179504, 0.012465052306652069, 0.24689850211143494, 0.3603934049606323, -0.47736606001853943, 0.18783722817897797, 0.611697256565094, 0.29197072982788086, 0.7773472666740417, -0.028694353997707367, 0.3285457193851471, -0.7410603165626526, 0.018729042261838913, 0.012201869860291481, 0.4865044057369232, 0.2857004702091217, -0.5419142842292786, 1.168660283088684, 0.40039512515068054, -0.6498706936836243, -0.7938072681427002, -0.23669201135635376, -1.0820338726043701, 0.14300121366977692, 1.4634032249450684, -0.3814859092235565, -0.19168050587177277, 0.030115123838186264, 0.019117869436740875, 0.6077249646186829, -0.7943499684333801, 0.6283793449401855, 0.7851126790046692, -0.30553731322288513, -0.13898569345474243, -0.5451511740684509, 0.47181615233421326, 0.004222037270665169, -0.7845088839530945, -0.11071660369634628, 0.2170739769935608, 0.5023503303527832, -0.046339426189661026, 0.750105619430542, -0.13322298228740692, -0.11970411986112595, -0.07089205086231232, 0.03567294776439667, -0.3901791274547577, -0.06672079116106033, -0.30484676361083984, 0.12215213477611542, -0.3473607003688812, -0.5300875902175903 ]
open-llm-leaderboard/details_ehartford__dolphin-llama-13b
open-llm-leaderboard
2023-10-22T08:31:14Z
200
0
[ "region:us" ]
null
2023-08-18T11:04:09Z
--- pretty_name: Evaluation run of ehartford/dolphin-llama-13b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [ehartford/dolphin-llama-13b](https://huggingface.co/ehartford/dolphin-llama-13b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configurations, each one corresponding to one of the\ \ evaluated tasks.\n\nThe dataset has been created from 4 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run. The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" stores all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__dolphin-llama-13b\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T08:31:06.423580](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-llama-13b/blob/main/results_2023-10-22T08-31-06.423580.json)\ \ (note that there might be results for other tasks in the repo if successive evals didn't\ \ cover the same tasks. You can find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n    \"all\": {\n        \"em\": 0.09867869127516779,\n\ \        \"em_stderr\": 0.003054155613095951,\n        \"f1\": 0.1882760067114087,\n\ \        \"f1_stderr\": 0.0033481950499125467,\n        \"acc\": 0.42166909111145284,\n\ \        \"acc_stderr\": 0.011280060733885005\n    },\n    \"harness|drop|3\": {\n\ \        \"em\": 0.09867869127516779,\n        \"em_stderr\": 0.003054155613095951,\n\ \        \"f1\": 0.1882760067114087,\n        \"f1_stderr\": 0.0033481950499125467\n\ \    },\n    \"harness|gsm8k|5\": {\n        \"acc\": 0.14404852160727824,\n   \ \     \"acc_stderr\": 0.009672110973065284\n    },\n    \"harness|winogrande|5\"\ : {\n        \"acc\": 0.6992896606156275,\n        \"acc_stderr\": 0.012888010494704725\n\ \    }\n}\n```" repo_url: https://huggingface.co/ehartford/dolphin-llama-13b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|arc:challenge|25_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|arc:challenge|25_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T16:19:11.269492.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T01_25_54.857200 path: - '**/details_harness|drop|3_2023-10-22T01-25-54.857200.parquet' - split: 2023_10_22T08_31_06.423580 path: - '**/details_harness|drop|3_2023-10-22T08-31-06.423580.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T08-31-06.423580.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T01_25_54.857200 path: - '**/details_harness|gsm8k|5_2023-10-22T01-25-54.857200.parquet' - split: 2023_10_22T08_31_06.423580 path: - '**/details_harness|gsm8k|5_2023-10-22T08-31-06.423580.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T08-31-06.423580.parquet' - config_name:
harness_hellaswag_10 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hellaswag|10_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hellaswag|10_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:30:40.142317.parquet' 
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T12:30:40.142317.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet' 
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet' - 
'**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet' - config_name: 
harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - 
'**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T12_30_40.142317 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T12:30:40.142317.parquet' - split: 2023_07_24T16_19_11.269492 path: - 
'**/details_harness|truthfulqa:mc|0_2023-07-24T16:19:11.269492.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T16:19:11.269492.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T01_25_54.857200 path: - '**/details_harness|winogrande|5_2023-10-22T01-25-54.857200.parquet' - split: 2023_10_22T08_31_06.423580 path: - '**/details_harness|winogrande|5_2023-10-22T08-31-06.423580.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T08-31-06.423580.parquet' - config_name: results data_files: - split: 2023_07_24T12_30_40.142317 path: - results_2023-07-24T12:30:40.142317.parquet - split: 2023_07_24T16_19_11.269492 path: - results_2023-07-24T16:19:11.269492.parquet - split: 2023_10_22T01_25_54.857200 path: - results_2023-10-22T01-25-54.857200.parquet - split: 2023_10_22T08_31_06.423580 path: - results_2023-10-22T08-31-06.423580.parquet - split: latest path: - results_2023-10-22T08-31-06.423580.parquet
---

# Dataset Card for Evaluation run of ehartford/dolphin-llama-13b

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/ehartford/dolphin-llama-13b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [ehartford/dolphin-llama-13b](https://huggingface.co/ehartford/dolphin-llama-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 4 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__dolphin-llama-13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-22T08:31:06.423580](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-llama-13b/blob/main/results_2023-10-22T08-31-06.423580.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You can find each of them in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.09867869127516779,
        "em_stderr": 0.003054155613095951,
        "f1": 0.1882760067114087,
        "f1_stderr": 0.0033481950499125467,
        "acc": 0.42166909111145284,
        "acc_stderr": 0.011280060733885005
    },
    "harness|drop|3": {
        "em": 0.09867869127516779,
        "em_stderr": 0.003054155613095951,
        "f1": 0.1882760067114087,
        "f1_stderr": 0.0033481950499125467
    },
    "harness|gsm8k|5": {
        "acc": 0.14404852160727824,
        "acc_stderr": 0.009672110973065284
    },
    "harness|winogrande|5": {
        "acc": 0.6992896606156275,
        "acc_stderr": 0.012888010494704725
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
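The same loading pattern applies to every configuration listed in the YAML header above. As a minimal sketch (it relies only on configuration and split names that appear in this card; the exact columns of each table are not documented here), the latest aggregated metrics and the latest per-example details of a single task can be read like this:

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_ehartford__dolphin-llama-13b"

# Aggregated metrics of the most recent run: the "latest" split of the
# "results" configuration points at the newest results_*.parquet file.
results = load_dataset(REPO, "results", split="latest")

# Per-example details for one task; timestamped splits such as
# "2023_10_22T08_31_06.423580" keep earlier runs accessible.
winogrande_details = load_dataset(REPO, "harness_winogrande_5", split="latest")

print(results[0])              # first row of the aggregated results table
print(winogrande_details[0])   # first row of the per-example details
```

The timestamped splits make it possible to compare successive runs, while `latest` mirrors the aggregated metrics shown above.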
open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5
open-llm-leaderboard
2023-10-21T21:05:49Z
200
0
[ "region:us" ]
null
2023-08-18T11:07:47Z
--- pretty_name: Evaluation run of lmsys/vicuna-7b-v1.5 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [lmsys/vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T21:05:37.153515](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5/blob/main/results_2023-10-21T21-05-37.153515.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.017932046979865772,\n\ \ \"em_stderr\": 0.0013590184569504276,\n \"f1\": 0.08961094798657747,\n\ \ \"f1_stderr\": 0.002014243406072028,\n \"acc\": 0.4016346602057357,\n\ \ \"acc_stderr\": 0.010076117588605417\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.017932046979865772,\n \"em_stderr\": 0.0013590184569504276,\n\ \ \"f1\": 0.08961094798657747,\n \"f1_stderr\": 0.002014243406072028\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08188021228203184,\n \ \ \"acc_stderr\": 0.007552338527716956\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7213891081294396,\n \"acc_stderr\": 0.012599896649493878\n\ \ }\n}\n```" repo_url: https://huggingface.co/lmsys/vicuna-7b-v1.5 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|arc:challenge|25_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-17T12:09:52.202468.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T12_56_49.814418 path: - '**/details_harness|drop|3_2023-10-19T12-56-49.814418.parquet' - split: 2023_10_21T21_05_37.153515 path: - '**/details_harness|drop|3_2023-10-21T21-05-37.153515.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T21-05-37.153515.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T12_56_49.814418 path: - '**/details_harness|gsm8k|5_2023-10-19T12-56-49.814418.parquet' - split: 2023_10_21T21_05_37.153515 path: - '**/details_harness|gsm8k|5_2023-10-21T21-05-37.153515.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T21-05-37.153515.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hellaswag|10_2023-08-17T12:09:52.202468.parquet' - split: latest path: - 
'**/details_harness|hellaswag|10_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet' - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet' - 
'**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet' - 
config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_17T12_09_52.202468 path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T12:09:52.202468.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-17T12:09:52.202468.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T12_56_49.814418 path: - '**/details_harness|winogrande|5_2023-10-19T12-56-49.814418.parquet' - split: 2023_10_21T21_05_37.153515 path: - '**/details_harness|winogrande|5_2023-10-21T21-05-37.153515.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T21-05-37.153515.parquet' - config_name: results data_files: - split: 2023_08_17T12_09_52.202468 path: - results_2023-08-17T12:09:52.202468.parquet - split: 2023_10_19T12_56_49.814418 path: - results_2023-10-19T12-56-49.814418.parquet - split: 2023_10_21T21_05_37.153515 path: - results_2023-10-21T21-05-37.153515.parquet - split: latest path: - results_2023-10-21T21-05-37.153515.parquet
---

# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/lmsys/vicuna-7b-v1.5
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5",
	"harness_winogrande_5",
	split="train")
```

A slightly broader loading sketch, covering configurations and timestamped splits, follows after this card.

## Latest results

These are the [latest results from run 2023-10-21T21:05:37.153515](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5/blob/main/results_2023-10-21T21-05-37.153515.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.017932046979865772,
        "em_stderr": 0.0013590184569504276,
        "f1": 0.08961094798657747,
        "f1_stderr": 0.002014243406072028,
        "acc": 0.4016346602057357,
        "acc_stderr": 0.010076117588605417
    },
    "harness|drop|3": {
        "em": 0.017932046979865772,
        "em_stderr": 0.0013590184569504276,
        "f1": 0.08961094798657747,
        "f1_stderr": 0.002014243406072028
    },
    "harness|gsm8k|5": {
        "acc": 0.08188021228203184,
        "acc_stderr": 0.007552338527716956
    },
    "harness|winogrande|5": {
        "acc": 0.7213891081294396,
        "acc_stderr": 0.012599896649493878
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
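The card above explains that each evaluated task is exposed as its own configuration, with one split per run timestamp plus a `latest` alias, and that an aggregated `results` configuration mirrors the leaderboard metrics. As a minimal illustrative sketch (added here, not part of the generated card), the snippet below shows how those configurations and splits could be enumerated and loaded with the `datasets` library; the repository, configuration, and split names are the ones listed above, and whether they resolve depends on the current state of the hub dataset.

```python
from datasets import get_dataset_config_names, get_dataset_split_names, load_dataset

repo = "open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5"

# Each harness task (plus the aggregated "results") is exposed as its own configuration.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations, e.g. {configs[:3]}")

# Every configuration keeps one split per evaluation run plus a "latest" alias.
print(get_dataset_split_names(repo, "harness_winogrande_5"))

# Per-example details for the most recent winogrande run.
winogrande = load_dataset(repo, "harness_winogrande_5", split="latest")
print(winogrande)

# The "results" configuration holds the aggregated metrics shown under "Latest results".
results = load_dataset(repo, "results", split="latest")
print(results[0])
```

Passing a timestamped split name instead of `latest` would pin the query to one specific evaluation run.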
[ -0.3839794993400574, -0.6462745666503906, 0.316388338804245, 0.22954411804676056, -0.2691447138786316, 0.010556547902524471, -0.3065342307090759, -0.16989736258983612, 0.43629732728004456, 0.5510503053665161, -0.7020779848098755, -0.9773902297019958, -0.6017192006111145, 0.20903126895427704, -0.11765117943286896, 1.0208022594451904, -0.21383078396320343, -0.17079000174999237, 0.051667988300323486, -0.3261074721813202, -0.45283761620521545, -0.49521803855895996, -0.5629016160964966, -0.3839579224586487, 0.527503252029419, 0.549045205116272, 0.3233085870742798, 0.7653206586837769, 0.6196017265319824, 0.38118821382522583, -0.11576586216688156, 0.27362677454948425, -0.3573756217956543, -0.05238667130470276, 0.21634280681610107, -0.7169675827026367, -0.8104892373085022, -0.025522369891405106, 0.7283757925033569, 0.37052425742149353, -0.1316547989845276, 0.6278690695762634, 0.13464079797267914, 0.5945567488670349, -0.459339439868927, 0.3279593586921692, -0.37695837020874023, -0.064966581761837, -0.37297698855400085, -0.3571458160877228, -0.034058451652526855, -0.40611907839775085, -0.32487356662750244, -0.5104349851608276, 0.24503298103809357, 0.03686915710568428, 1.0502657890319824, 0.2026711404323578, -0.15383757650852203, -0.16217179596424103, -0.27976658940315247, 0.7615422010421753, -0.9061952233314514, -0.0077951340936124325, 0.6550257802009583, 0.20092496275901794, -0.35671088099479675, -0.5876197218894958, -0.34817057847976685, -0.08034227788448334, -0.16696041822433472, 0.18413332104682922, 0.02303944155573845, -0.08233649283647537, 0.4129323363304138, 0.6124545335769653, -0.6323423981666565, 0.07177767157554626, -0.6697128415107727, -0.09216999262571335, 0.9474798440933228, 0.4209166169166565, 0.05345548316836357, -0.503460168838501, -0.36571478843688965, -0.2856878936290741, -0.3937775790691376, 0.2432538866996765, 0.43524694442749023, 0.6283261775970459, -0.5708589553833008, 0.8388552069664001, -0.46138492226600647, 0.5771290063858032, -0.1516241431236267, -0.26952123641967773, 0.8306471705436707, -0.49037396907806396, -0.34395360946655273, 0.03807011991739273, 1.1056100130081177, 0.44365155696868896, -0.04520981013774872, 0.19516034424304962, -0.282962828874588, -0.002082114340737462, 0.0902596116065979, -0.7199615836143494, -0.033440861850976944, 0.5939604640007019, -0.5149893760681152, -0.42830246686935425, 0.28200238943099976, -0.7884771227836609, -0.3053508996963501, -0.30201879143714905, 0.24179533123970032, -0.19360853731632233, -0.3547287583351135, -0.001169715542346239, -0.10452188551425934, 0.257063090801239, 0.14288686215877533, -0.5622081160545349, 0.3175213932991028, 0.6167527437210083, 1.0366904735565186, -0.09190499037504196, -0.4320778250694275, -0.39068603515625, -0.2357064187526703, -0.22564208507537842, 0.49529334902763367, -0.01868419535458088, -0.3646726608276367, -0.13840889930725098, 0.4334277808666229, -0.28333598375320435, -0.658337414264679, 0.7114753723144531, -0.17586465179920197, 0.1346510797739029, -0.3293643295764923, -0.42899903655052185, -0.06146955490112305, 0.3449949622154236, -0.7334466576576233, 1.4281314611434937, 0.35034215450286865, -0.8588467836380005, 0.1187639981508255, -0.7475935816764832, -0.10197372734546661, 0.0790293738245964, -0.04385795816779137, -0.6100655198097229, -0.08653412014245987, 0.06736888736486435, 0.6432421803474426, -0.3126508891582489, 0.07232300192117691, -0.275089830160141, -0.45167940855026245, 0.1878272444009781, -0.13313882052898407, 1.053142786026001, 0.2479337751865387, -0.4812045693397522, 0.21402528882026672, 
-1.0588411092758179, 0.16769623756408691, 0.3307381570339203, -0.5053314566612244, -0.24896325170993805, -0.29282814264297485, 0.06013555824756622, 0.14545175433158875, 0.5627208948135376, -0.495789110660553, 0.44943031668663025, -0.09667270630598068, 0.2286185920238495, 0.9560413360595703, 0.004471447318792343, 0.17864911258220673, -0.46449995040893555, 0.564741849899292, 0.0425231009721756, 0.4452873468399048, 0.14312461018562317, -0.48788100481033325, -0.8325457572937012, -0.27133041620254517, 0.07165544480085373, 0.6558099389076233, -0.5542677640914917, 0.7455611824989319, -0.39386558532714844, -0.7281880378723145, -0.822496235370636, 0.14928607642650604, 0.3928976356983185, 0.3974708020687103, 0.40247711539268494, -0.24084530770778656, -0.7636155486106873, -1.0732113122940063, 0.02098206803202629, -0.18357165157794952, 0.018854154273867607, 0.3882094621658325, 0.8589600324630737, -0.30462270975112915, 0.6787188053131104, -0.6399736404418945, -0.3317299485206604, -0.25548627972602844, -0.09342887252569199, 0.8147376179695129, 0.5064989328384399, 0.46517476439476013, -0.6009222269058228, -0.2710150480270386, 0.056840382516384125, -0.8918747901916504, -0.22282786667346954, -0.15396389365196228, -0.280080646276474, 0.3169576823711395, 0.040542371571063995, -0.5110070705413818, 0.5450076460838318, 0.5254456996917725, -0.6469417214393616, 0.6615397930145264, -0.09261485189199448, 0.4133828580379486, -1.241086483001709, 0.16109736263751984, 0.08417275547981262, -0.10593190789222717, -0.43323859572410583, -0.11756857484579086, 0.08444226533174515, 0.469748318195343, -0.5089206099510193, 0.6296002864837646, -0.47057729959487915, -0.2090362310409546, -0.020840371027588844, 0.07166378200054169, -0.08098801225423813, 0.5000377297401428, -0.30487024784088135, 0.7456000447273254, 0.4738427698612213, -0.482965350151062, 0.47464796900749207, 0.27029794454574585, -0.5262922048568726, 0.23111338913440704, -0.5144453644752502, -0.010518600232899189, 0.14302626252174377, 0.10036786645650864, -0.7703616619110107, -0.29653677344322205, 0.5223352313041687, -0.5609508752822876, 0.13711105287075043, -0.3596143126487732, -0.5194886922836304, -0.43477386236190796, -0.4840545356273651, 0.13068324327468872, 0.39607861638069153, -0.4632226526737213, 0.29103171825408936, 0.3385390341281891, 0.07118279486894608, -0.6849905848503113, -0.6940706372261047, -0.16140319406986237, -0.46801841259002686, -0.5001550316810608, 0.26460134983062744, -0.16766145825386047, -0.30195853114128113, -0.014453418552875519, -0.05125982686877251, 0.0032734766136854887, 0.16879472136497498, 0.3374738395214081, 0.5722528100013733, -0.11751478165388107, -0.3433644771575928, -0.21587267518043518, -0.09874355047941208, 0.12312724441289902, 0.10393892228603363, 0.680282473564148, -0.3286665081977844, -0.22479969263076782, -0.23563311994075775, 0.1127575933933258, 0.5719567537307739, -0.10496040433645248, 0.9473730325698853, 0.729934811592102, -0.26623305678367615, -0.04014322906732559, -0.40737977623939514, 0.10842416435480118, -0.4883356988430023, 0.38440823554992676, -0.4570288062095642, -0.7092627882957458, 0.90224289894104, 0.31267568469047546, 0.20340965688228607, 0.7249566912651062, 0.6481686234474182, 0.12302204966545105, 0.6774475574493408, 0.28477349877357483, -0.08312971144914627, 0.5986223220825195, -0.7906923890113831, -0.11741048842668533, -1.085660696029663, -0.4725799262523651, -0.48589783906936646, -0.24443672597408295, -0.8694852590560913, -0.43670395016670227, 0.2349332571029663, 0.21765249967575073, -0.36828628182411194, 
0.5494294762611389, -0.7038373351097107, 0.19110479950904846, 0.6224888563156128, 0.33706095814704895, 0.06689835339784622, -0.09374939650297165, -0.038477350026369095, 0.30855512619018555, -0.5863167643547058, -0.4668704867362976, 1.320543885231018, 0.33865973353385925, 0.6144447326660156, 0.009869405068457127, 1.0377424955368042, 0.3455444574356079, 0.34003278613090515, -0.6028076410293579, 0.6333829164505005, 0.0257733017206192, -0.4770146906375885, -0.24259796738624573, -0.6252172589302063, -1.041459560394287, 0.3018573522567749, -0.052291661500930786, -0.9542244672775269, 0.16557228565216064, 0.0012174805160611868, -0.10770715028047562, 0.33515846729278564, -0.5811107158660889, 0.8211265206336975, -0.2938311994075775, -0.5570082068443298, 0.13620872795581818, -0.7262965440750122, 0.4335854649543762, 0.12951023876667023, 0.4031744599342346, -0.256303071975708, 0.00432653957977891, 1.0912917852401733, -0.6262453198432922, 0.7558607459068298, -0.20583389699459076, 0.0022398019209504128, 0.36790919303894043, -0.35773783922195435, 0.5376749038696289, -0.0917336642742157, -0.19155968725681305, 0.3609768748283386, -0.041341282427310944, -0.40687936544418335, -0.36457642912864685, 0.899874210357666, -0.9003852009773254, -0.3376464247703552, -0.46843552589416504, -0.508323073387146, 0.26943930983543396, 0.22526627779006958, 0.42212921380996704, 0.35120341181755066, 0.09091251343488693, 0.21509842574596405, 0.31037023663520813, -0.17326001822948456, 0.46145710349082947, 0.336914598941803, -0.2626863718032837, -0.7361937165260315, 0.7380289435386658, 0.281687468290329, 0.10639690607786179, 0.14292152225971222, 0.04203556105494499, -0.5218583345413208, -0.4321802854537964, -0.33780908584594727, 0.34606099128723145, -0.6410812139511108, -0.33060967922210693, -0.41105687618255615, -0.2648482024669647, -0.4314412474632263, 0.05647590383887291, -0.5341397523880005, -0.41537582874298096, -0.38926267623901367, -0.25765377283096313, 0.5957449674606323, 0.6124573349952698, -0.29918238520622253, 0.25433433055877686, -0.7833664417266846, 0.28546446561813354, -0.2512873411178589, 0.45122116804122925, -0.17015613615512848, -0.5321671962738037, -0.5558686852455139, 0.11377479881048203, -0.30565041303634644, -0.9913238883018494, 0.6408150792121887, -0.17519202828407288, 0.773019015789032, 0.15314719080924988, 0.15737077593803406, 0.763799786567688, -0.1489664912223816, 0.981340229511261, 0.03332085162401199, -0.6256967782974243, 0.7584093809127808, -0.32857513427734375, 0.14829599857330322, 0.5070198178291321, 0.20368579030036926, -0.44237083196640015, -0.2509964108467102, -0.9647654891014099, -1.2170312404632568, 0.8793013095855713, 0.5220773220062256, -0.41851934790611267, 0.06483807414770126, 0.3668309152126312, -0.030877061188220978, 0.13946078717708588, -0.6897469162940979, -0.8273975849151611, -0.13131806254386902, -0.30329564213752747, -0.1421489268541336, -0.06133287027478218, -0.40196287631988525, -0.4346117079257965, 0.8868662714958191, -0.04581919685006142, 0.4666379690170288, 0.3063231110572815, -0.050439681857824326, -0.02169165201485157, 0.32493898272514343, 0.5261885523796082, 0.7438367009162903, -0.4912037253379822, -0.046020377427339554, 0.26692673563957214, -0.5948110818862915, 0.08325977623462677, 0.2785910367965698, -0.012602039612829685, 0.0013097916962578893, 0.5643619298934937, 1.0501818656921387, -0.052848245948553085, -0.3258950412273407, 0.3825359344482422, -0.09431414306163788, -0.3081117868423462, -0.5491326451301575, 0.16430020332336426, -0.07728186994791031, 0.45531052350997925, 
0.4434700906276703, -0.2101549208164215, 0.008472733199596405, -0.3971022665500641, 0.17434869706630707, 0.22090445458889008, -0.08731599897146225, -0.24375058710575104, 0.5467711091041565, -0.12286768108606339, -0.40059977769851685, 0.5909422636032104, -0.033743683248758316, -0.572777509689331, 1.0948082208633423, 0.31146955490112305, 0.9269812703132629, -0.15911835432052612, 0.09772992879152298, 0.6410118937492371, 0.42962366342544556, -0.17687882483005524, 0.5740674734115601, 0.1043643057346344, -0.5618846416473389, -0.15765927731990814, -0.7747592329978943, -0.22142170369625092, 0.49116334319114685, -1.1170547008514404, 0.40496018528938293, -0.14378736913204193, -0.35576239228248596, -0.22178268432617188, 0.47048312425613403, -0.8729590773582458, 0.23876099288463593, 0.03464781865477562, 0.875821053981781, -1.0882495641708374, 0.6924812197685242, 0.8413545489311218, -0.5077449083328247, -0.9026895761489868, -0.37144044041633606, -0.044489774852991104, -0.751982569694519, 0.3204163610935211, 0.2677125632762909, 0.35472768545150757, -0.20706257224082947, -0.624588668346405, -0.9746807217597961, 1.5674937963485718, 0.14895938336849213, -0.5899197459220886, 0.2158270627260208, 0.16160793602466583, 0.3699234127998352, -0.28324025869369507, 0.5524827241897583, 0.7708312273025513, 0.6328479647636414, -0.14115263521671295, -0.9400995373725891, 0.32987815141677856, -0.548434317111969, -0.10241055488586426, 0.19428877532482147, -0.9192324280738831, 1.0490388870239258, -0.09899426996707916, 0.053997598588466644, -0.035817086696624756, 0.49273088574409485, 0.7322210669517517, 0.3457038104534149, 0.46610429883003235, 0.522247850894928, 0.7682720422744751, -0.2883095443248749, 1.1259859800338745, -0.2545437812805176, 0.7884178757667542, 1.0790306329727173, 0.17173494398593903, 0.72682124376297, 0.45320406556129456, -0.43391552567481995, 0.43687355518341064, 0.9045066237449646, -0.2912164032459259, 0.3837294578552246, 0.1792980581521988, -0.08508986979722977, -0.07029367983341217, 0.05949261039495468, -0.5382182002067566, 0.34081584215164185, 0.2089555859565735, -0.5647255182266235, -0.049303874373435974, -0.2991756200790405, 0.11017615348100662, -0.33054545521736145, -0.2980295717716217, 0.5279107689857483, 0.04083198308944702, -0.5490513443946838, 0.8274285793304443, -0.08900681138038635, 0.7190676927566528, -0.6869967579841614, -0.08676815032958984, -0.35989052057266235, 0.3098822236061096, -0.5694184899330139, -0.9389469027519226, 0.12988951802253723, 0.09269361197948456, -0.2094782441854477, -0.054585471749305725, 0.5952969789505005, -0.32736024260520935, -0.5637714266777039, 0.4462970197200775, 0.431545227766037, 0.40357282757759094, 0.09986142069101334, -0.8106854557991028, 0.3583531975746155, 0.24955619871616364, -0.8224126696586609, 0.427091121673584, 0.307222843170166, 0.004285530187189579, 0.6722458004951477, 0.6717944145202637, 0.22484546899795532, 0.2236144244670868, 0.07101824879646301, 1.1129411458969116, -0.8113566040992737, -0.41365858912467957, -0.8325349688529968, 0.901167631149292, -0.25044506788253784, -0.5832592844963074, 0.8344893455505371, 0.9849255084991455, 0.7598720788955688, 0.11551845073699951, 0.9072283506393433, -0.40330129861831665, 0.40064650774002075, -0.5151504874229431, 0.8004169464111328, -0.7764655351638794, 0.37954407930374146, -0.18318773806095123, -0.8700892329216003, -0.01608056016266346, 0.6904857158660889, -0.17683394253253937, -0.04497367888689041, 0.6062310934066772, 0.9502619504928589, 0.07452601939439774, -0.06287995725870132, 0.09766426682472229, 
0.4377420246601105, 0.3319242596626282, 0.5835168957710266, 0.6416671276092529, -0.7513535022735596, 0.37243056297302246, -0.6657103300094604, -0.43488234281539917, -0.1313042789697647, -0.6527903079986572, -0.827302873134613, -0.5965833067893982, -0.24881750345230103, -0.6420324444770813, 0.09791094064712524, 1.0893285274505615, 0.33157551288604736, -0.7470866441726685, -0.47708940505981445, 0.05003799498081207, 0.15702365338802338, -0.2332429587841034, -0.3525587320327759, 0.605307400226593, -0.09629854559898376, -0.8098503947257996, 0.35449570417404175, -0.24869495630264282, -0.14589302241802216, -0.06014561653137207, -0.2229347825050354, -0.32751205563545227, -0.2802583873271942, 0.42820316553115845, 0.2440336048603058, -0.6700677275657654, -0.27626368403434753, -0.10654901713132858, -0.11764480918645859, 0.29716119170188904, 0.31648513674736023, -0.6073075532913208, 0.01308413315564394, 0.5169470310211182, 0.13740436732769012, 0.6578300595283508, 0.04779871925711632, 0.2298310399055481, -0.6389195919036865, 0.030618680641055107, -0.05078376457095146, 0.5653845071792603, 0.23604102432727814, -0.5101281404495239, 0.9520429372787476, 0.3137565851211548, -0.6452898979187012, -0.9958988428115845, -0.28503623604774475, -1.2715851068496704, -0.026171118021011353, 1.4852615594863892, -0.22793452441692352, -0.5069807767868042, 0.09164437651634216, -0.22910185158252716, 0.4828379154205322, -0.5839341878890991, 0.5564712882041931, 0.5806341171264648, -0.27470508217811584, -0.11640401929616928, -0.708657443523407, 0.3254911005496979, 0.012427153997123241, -1.0049465894699097, 0.028397157788276672, 0.30757763981819153, 0.46493929624557495, 0.14536862075328827, 0.7838830351829529, -0.01710161380469799, -0.12115027755498886, 0.035249657928943634, 0.33642712235450745, -0.3266223073005676, -0.15429897606372833, -0.27721863985061646, 0.07257107645273209, -0.3143964409828186, -0.5956965684890747 ]
open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b
open-llm-leaderboard
2023-10-22T03:38:50Z
200
0
[ "region:us" ]
null
2023-08-18T11:12:09Z
--- pretty_name: Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [OpenAssistant/oasst-sft-1-pythia-12b](https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T03:38:38.139466](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b/blob/main/results_2023-10-22T03-38-38.139466.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.007340604026845637,\n\ \ \"em_stderr\": 0.000874189687534619,\n \"f1\": 0.06295302013422834,\n\ \ \"f1_stderr\": 0.0015822681875736675,\n \"acc\": 0.3140033976135325,\n\ \ \"acc_stderr\": 0.007883417880991842\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.007340604026845637,\n \"em_stderr\": 0.000874189687534619,\n\ \ \"f1\": 0.06295302013422834,\n \"f1_stderr\": 0.0015822681875736675\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006065200909780136,\n \ \ \"acc_stderr\": 0.002138670301460446\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6219415943172849,\n \"acc_stderr\": 0.013628165460523237\n\ \ }\n}\n```" repo_url: https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|arc:challenge|25_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T18:16:49.631586.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T03_38_38.139466 path: - '**/details_harness|drop|3_2023-10-22T03-38-38.139466.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T03-38-38.139466.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T03_38_38.139466 path: - '**/details_harness|gsm8k|5_2023-10-22T03-38-38.139466.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T03-38-38.139466.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hellaswag|10_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_5 data_files: 
- split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T18_16_49.631586 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T18_16_49.631586 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:16:49.631586.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:16:49.631586.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T03_38_38.139466 path: - '**/details_harness|winogrande|5_2023-10-22T03-38-38.139466.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T03-38-38.139466.parquet' - config_name: results data_files: - split: 2023_07_19T18_16_49.631586 path: - results_2023-07-19T18:16:49.631586.parquet - split: 2023_10_22T03_38_38.139466 path: - results_2023-10-22T03-38-38.139466.parquet - split: latest path: - results_2023-10-22T03-38-38.139466.parquet --- # Dataset Card for Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/oasst-sft-1-pythia-12b](https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T03:38:38.139466](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b/blob/main/results_2023-10-22T03-38-38.139466.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.007340604026845637, "em_stderr": 0.000874189687534619, "f1": 0.06295302013422834, "f1_stderr": 0.0015822681875736675, "acc": 0.3140033976135325, "acc_stderr": 0.007883417880991842 }, "harness|drop|3": { "em": 0.007340604026845637, "em_stderr": 0.000874189687534619, "f1": 0.06295302013422834, "f1_stderr": 0.0015822681875736675 }, "harness|gsm8k|5": { "acc": 0.006065200909780136, "acc_stderr": 0.002138670301460446 }, "harness|winogrande|5": { "acc": 0.6219415943172849, "acc_stderr": 0.013628165460523237 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
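Since each evaluation run of this card is stored as a timestamped split, with "latest" pointing at the most recent one, older runs can be compared against the newest results. The snippet below is a minimal sketch rather than part of the generated card; it assumes the timestamped split names listed in the YAML header above (e.g. `2023_10_22T03_38_38.139466`) are accepted verbatim by `load_dataset`:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b"

# "latest" is an alias for the most recent run of this config.
latest = load_dataset(repo, "harness_winogrande_5", split="latest")

# A specific run can be loaded through its timestamp-named split
# (split name taken from the YAML header above; assumed to be accepted as-is).
run = load_dataset(repo, "harness_winogrande_5", split="2023_10_22T03_38_38.139466")

print(latest)
print(run)
```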
[ -0.3549293875694275, -0.7252447605133057, 0.1894335299730301, 0.15403138101100922, -0.12666338682174683, 0.10968651622533798, -0.35253602266311646, -0.11403614282608032, 0.34588658809661865, 0.4561164975166321, -0.6817407608032227, -0.9137287735939026, -0.6272919178009033, 0.09367351233959198, -0.10758424550294876, 1.12431001663208, -0.4273506700992584, -0.15353195369243622, 0.13441503047943115, -0.31117570400238037, -0.29717153310775757, -0.38694441318511963, -0.4078327715396881, -0.3353404402732849, 0.3714396059513092, 0.6320153474807739, 0.4522521197795868, 0.6245338916778564, 0.6887195706367493, 0.3558954894542694, -0.05820535495877266, 0.13548748195171356, -0.515440821647644, -0.13524264097213745, 0.13025853037834167, -0.5316087603569031, -0.724968671798706, 0.1583816558122635, 0.7348864674568176, 0.4876857101917267, -0.1277352124452591, 0.66655433177948, 0.06718127429485321, 0.578005313873291, -0.5442585349082947, 0.37424105405807495, -0.28781989216804504, -0.028806161135435104, -0.34461599588394165, -0.1519080400466919, -0.06888877600431442, -0.4486951529979706, -0.07617227733135223, -0.5089174509048462, 0.11671096831560135, 0.19755947589874268, 1.0604749917984009, 0.1226653903722763, -0.11644621193408966, -0.1597810834646225, -0.3910958468914032, 0.8013083338737488, -0.7954922914505005, -0.010879780165851116, 0.6287185549736023, 0.14156630635261536, -0.24963249266147614, -0.6017482876777649, -0.30771222710609436, -0.1186860203742981, -0.2802916467189789, 0.1645413339138031, -0.07468241453170776, -0.1395678073167801, 0.3946600556373596, 0.7648423910140991, -0.7635324597358704, -0.0324203222990036, -0.6268813610076904, -0.13071385025978088, 0.864099383354187, 0.32652971148490906, 0.12849922478199005, -0.41079750657081604, -0.4248497486114502, -0.27931126952171326, -0.4602981507778168, 0.17955711483955383, 0.538105309009552, 0.45763322710990906, -0.6006872057914734, 0.8634885549545288, -0.5081499218940735, 0.39912867546081543, -0.13386119902133942, -0.11772637069225311, 0.8086270093917847, -0.5205357074737549, -0.20699188113212585, 0.0640939399600029, 1.1309185028076172, 0.3105168044567108, 0.00455502700060606, 0.10911300033330917, -0.20209114253520966, -0.0818680077791214, 0.06054224073886871, -0.8751160502433777, -0.27843838930130005, 0.4089377224445343, -0.6409784555435181, -0.38837742805480957, 0.44266557693481445, -0.9076101779937744, -0.11791777610778809, -0.20174472033977509, 0.3323640823364258, -0.3130403161048889, -0.4722690284252167, -0.11055093258619308, -0.07856353372335434, 0.14230726659297943, 0.12982343137264252, -0.5462523102760315, 0.4333001375198364, 0.5390003323554993, 1.1451356410980225, -0.14171496033668518, -0.4282175898551941, -0.408412367105484, -0.3246256411075592, -0.15831641852855682, 0.32776954770088196, -0.08972688019275665, -0.2540552318096161, -0.22742882370948792, 0.2948538661003113, -0.30902451276779175, -0.4995597004890442, 0.6103923320770264, -0.2627156972885132, 0.12495260685682297, -0.25896215438842773, -0.3166196644306183, -0.14091159403324127, 0.2504841089248657, -0.6850844621658325, 1.4071240425109863, 0.4897119402885437, -0.8916664719581604, 0.08886153250932693, -0.9304506778717041, -0.1487160623073578, 0.030975956469774246, 0.07764166593551636, -0.5471677780151367, -0.08765026181936264, 0.11563273519277573, 0.4261418581008911, -0.3446863889694214, -0.01434100978076458, -0.23981963098049164, -0.4037431478500366, 0.15617887675762177, -0.1464112251996994, 1.0500948429107666, 0.25497370958328247, -0.43477851152420044, 0.214952290058136, 
-0.9360281229019165, 0.12668290734291077, 0.392015665769577, -0.5236849784851074, -0.2222825288772583, -0.2072659730911255, 0.13580189645290375, 0.14704935252666473, 0.44498899579048157, -0.712670087814331, 0.3816194534301758, -0.24667857587337494, 0.4296812415122986, 1.0390138626098633, -0.020031770691275597, 0.30227407813072205, -0.41371798515319824, 0.5751593112945557, 0.04882390424609184, 0.27243292331695557, 0.050644081085920334, -0.5540874004364014, -0.6268028020858765, -0.31686070561408997, 0.16523411870002747, 0.6532013416290283, -0.5015217661857605, 0.7223033905029297, -0.40243008732795715, -0.6987685561180115, -0.738690197467804, 0.003446824150159955, 0.4869459867477417, 0.5063408613204956, 0.41962745785713196, -0.3160146176815033, -0.6245033144950867, -0.8821862936019897, 0.027343494817614555, -0.2537917494773865, 0.0749422013759613, 0.4691236615180969, 1.1650731563568115, -0.11039600521326065, 0.7738605737686157, -0.8692081570625305, -0.30577459931373596, -0.19497470557689667, 0.05425715446472168, 0.7175488471984863, 0.5550615787506104, 0.4520416855812073, -0.6799261569976807, -0.3897847533226013, -0.013616652227938175, -0.7541970610618591, -0.16486462950706482, -0.05223693698644638, -0.35562390089035034, 0.3156788945198059, -0.040164750069379807, -0.5752370953559875, 0.6471030116081238, 0.4579046368598938, -0.7000671029090881, 0.7335121631622314, -0.003816227661445737, 0.491568922996521, -1.1382488012313843, 0.251926064491272, 0.023139581084251404, 0.048010971397161484, -0.3985140323638916, -0.03216328099370003, -0.003952091094106436, 0.2890135645866394, -0.44470691680908203, 0.6376820802688599, -0.4420607089996338, -0.18278099596500397, 0.1359284222126007, 0.0936025083065033, -0.25039586424827576, 0.5985094904899597, -0.2534281313419342, 0.8144150972366333, 0.5453859567642212, -0.3402700126171112, 0.40348559617996216, 0.5698716640472412, -0.615114152431488, 0.23080001771450043, -0.48361819982528687, -0.006867057643830776, 0.18967218697071075, 0.17237557470798492, -0.9842654466629028, -0.38013985753059387, 0.5682315230369568, -0.6355248689651489, 0.08850222826004028, -0.32639333605766296, -0.6465464234352112, -0.5112081170082092, -0.4836995303630829, 0.29117345809936523, 0.4873625636100769, -0.5609642863273621, 0.2786121666431427, 0.2550292909145355, 0.0008645171765238047, -0.5340908765792847, -0.7233542799949646, -0.3572815954685211, -0.2863650321960449, -0.7176159620285034, 0.364995539188385, -0.10308514535427094, -0.27200356125831604, 0.08816047012805939, -0.16115787625312805, -0.05015303194522858, 0.16971752047538757, 0.4035974442958832, 0.5658746957778931, -0.26653051376342773, -0.3414709270000458, -0.248832568526268, -0.14662814140319824, 0.14372114837169647, -0.014362023212015629, 0.6683408617973328, -0.307012677192688, -0.3537309169769287, -0.22522249817848206, 0.14141713082790375, 0.5093802809715271, -0.292831152677536, 0.9977410435676575, 0.6501835584640503, -0.2632201313972473, -0.02063792385160923, -0.2526618540287018, -0.019141744822263718, -0.46979033946990967, 0.3677913248538971, -0.3447866141796112, -0.7503038644790649, 0.829139769077301, 0.21287664771080017, 0.17414948344230652, 0.6726300716400146, 0.5542742013931274, 0.21176376938819885, 0.8302478194236755, 0.14770518243312836, -0.19851048290729523, 0.5925471186637878, -0.7736412882804871, 0.06790662556886673, -1.1169517040252686, -0.3450595736503601, -0.5341964960098267, -0.41729700565338135, -0.8291836380958557, -0.30269062519073486, 0.30259764194488525, 0.2984433174133301, -0.539048433303833, 
0.552215576171875, -0.6772411465644836, 0.21425016224384308, 0.7273575663566589, 0.31956449151039124, 0.06679260730743408, -0.01110114622861147, -0.06541983038187027, 0.2574249505996704, -0.46589118242263794, -0.39276495575904846, 1.3714690208435059, 0.28758352994918823, 0.6589896082878113, 0.11148446798324585, 0.9223688840866089, 0.19725415110588074, 0.28188103437423706, -0.5293676853179932, 0.6020398139953613, 0.04150903597474098, -0.5174165964126587, -0.2535896897315979, -0.6888525485992432, -0.8820130825042725, 0.22454556822776794, 0.03228852152824402, -1.101012945175171, -0.019138043746352196, -0.046635519713163376, -0.11144092679023743, 0.4368056356906891, -0.5873869061470032, 0.9193770885467529, -0.22490334510803223, -0.4721522927284241, 0.06503701210021973, -0.8087747097015381, 0.4352661967277527, 0.14446555078029633, 0.2870236337184906, -0.17043666541576385, 0.07202445715665817, 1.0672985315322876, -0.7492465972900391, 0.6095828413963318, -0.1947636604309082, 0.1622633934020996, 0.3897154927253723, -0.28538978099823, 0.569887638092041, -0.05920620635151863, -0.2402946799993515, 0.4275444746017456, -0.13856402039527893, -0.3210492730140686, -0.2550460398197174, 0.9501298666000366, -0.9365354180335999, -0.24572107195854187, -0.5604891180992126, -0.5623149871826172, 0.27687525749206543, 0.2725391983985901, 0.38074827194213867, 0.28825417160987854, 0.01631605625152588, 0.25982144474983215, 0.3087191581726074, -0.17797164618968964, 0.4635879099369049, 0.3812750577926636, -0.24464070796966553, -0.7663554549217224, 0.7931681871414185, 0.3167705833911896, 0.21343274414539337, 0.19277474284172058, 0.16347736120224, -0.44318264722824097, -0.4538961350917816, -0.4546245336532593, 0.2464493215084076, -0.5567841529846191, -0.31810706853866577, -0.3905697762966156, -0.23455360531806946, -0.4308644235134125, -0.023654025048017502, -0.34740254282951355, -0.4483214020729065, -0.413896769285202, -0.2576775550842285, 0.719001829624176, 0.5479980707168579, -0.29443585872650146, 0.3324904441833496, -0.7326237559318542, 0.30836787819862366, -0.3461245000362396, 0.5367700457572937, -0.08188756555318832, -0.5209156274795532, -0.3662821352481842, 0.12769626080989838, -0.5013760924339294, -0.9568560719490051, 0.49204957485198975, -0.07092203944921494, 0.665302574634552, 0.1323191523551941, 0.23513326048851013, 0.7029997706413269, -0.17269477248191833, 1.0704911947250366, 0.011306100524961948, -0.7355541586875916, 0.801572859287262, -0.3755543828010559, 0.15192197263240814, 0.5277504324913025, 0.30623042583465576, -0.38594067096710205, -0.22710032761096954, -0.8758426904678345, -1.2523903846740723, 1.2642747163772583, 0.6457072496414185, -0.43113404512405396, 0.09412990510463715, 0.2619383633136749, -0.06997334957122803, 0.19490468502044678, -0.7063403725624084, -0.7957584261894226, -0.1466747522354126, -0.3284846246242523, 0.0010540266521275043, 0.1334661841392517, -0.3293350338935852, -0.38981854915618896, 0.9810706973075867, -0.02883545495569706, 0.48797065019607544, 0.22709523141384125, -0.07110092788934708, -0.1759272813796997, 0.21389900147914886, 0.3963248133659363, 0.6513823866844177, -0.429828941822052, -0.14087475836277008, 0.21134763956069946, -0.7066140174865723, 0.030490124598145485, 0.3154554069042206, -0.1287751942873001, -0.1371545046567917, 0.5346342325210571, 0.979193925857544, 0.060369934886693954, -0.39353564381599426, 0.4945659935474396, 0.11929053068161011, -0.309500515460968, -0.41371968388557434, 0.08625467866659164, -0.1510419249534607, 0.36113983392715454, 0.4418655037879944, 
-0.029510190710425377, -0.03556029126048088, -0.4861324429512024, 0.19596751034259796, 0.2561340928077698, -0.07571115344762802, -0.2852453589439392, 0.5840671062469482, -0.13297782838344574, -0.3788561522960663, 0.8105921149253845, -0.10046005249023438, -0.6076748967170715, 1.0697968006134033, 0.2980206310749054, 0.9436445236206055, -0.15978775918483734, 0.09342410415410995, 0.6434816122055054, 0.36075738072395325, -0.20176754891872406, 0.6252676844596863, 0.09891533851623535, -0.44597432017326355, -0.11388406902551651, -0.8343619704246521, -0.1690775752067566, 0.3437511920928955, -1.0650733709335327, 0.4035760164260864, -0.20243829488754272, -0.13431628048419952, -0.14695361256599426, 0.3385182023048401, -0.785430908203125, 0.1403108686208725, -0.12816090881824493, 0.96392422914505, -1.0205997228622437, 0.5657419562339783, 0.8292791247367859, -0.6592832207679749, -0.9530418515205383, -0.18393634259700775, 0.06356674432754517, -0.6918389201164246, 0.42126744985580444, 0.2712256610393524, 0.3720936179161072, -0.13034789264202118, -0.6339492797851562, -1.0539860725402832, 1.5718064308166504, 0.12349990010261536, -0.48955056071281433, 0.11522048711776733, 0.13646146655082703, 0.34056758880615234, -0.2911742925643921, 0.6230131983757019, 0.7320271730422974, 0.742618203163147, -0.027650602161884308, -0.9532034993171692, 0.38249629735946655, -0.43974220752716064, -0.19841310381889343, 0.48708394169807434, -0.8691587448120117, 1.1505383253097534, -0.21345822513103485, -0.024213269352912903, 0.16929247975349426, 0.3813486695289612, 0.48944956064224243, 0.3293110132217407, 0.3620651066303253, 0.655845582485199, 0.6263238191604614, -0.4866017699241638, 0.9958149194717407, -0.24505077302455902, 0.859076738357544, 1.1287137269973755, 0.03260352090001106, 0.8089219927787781, 0.371841162443161, -0.48176854848861694, 0.46713048219680786, 0.7937880158424377, -0.366438090801239, 0.5207139253616333, 0.22566881775856018, -0.03289173170924187, 0.05946076288819313, 0.11696499586105347, -0.4867773652076721, 0.3546253442764282, 0.11348999291658401, -0.6418681740760803, -0.20665200054645538, -0.23247185349464417, 0.06675633788108826, -0.2815033495426178, -0.3149619996547699, 0.5635182857513428, -0.05762798711657524, -0.5970913767814636, 0.607507586479187, -0.02242598868906498, 0.6104173064231873, -0.7159433960914612, -0.1527603268623352, -0.11273279041051865, 0.34353092312812805, -0.5902875065803528, -0.9972817301750183, 0.18663258850574493, 0.08619997650384903, -0.21940751373767853, -0.20905086398124695, 0.5731202960014343, -0.2760680019855499, -0.48958200216293335, 0.41953009366989136, 0.3295189142227173, 0.35547080636024475, 0.001896418398246169, -0.7522116303443909, 0.2099224478006363, 0.22626297175884247, -0.8214207887649536, 0.4476267695426941, 0.27730119228363037, -0.03989652916789055, 0.4859881103038788, 0.7680622339248657, 0.1315763145685196, 0.14175333082675934, -0.16082815825939178, 1.1533807516098022, -0.713918149471283, -0.4289640486240387, -0.8496227264404297, 0.9296567440032959, -0.1902264803647995, -0.7532059550285339, 0.8283532857894897, 1.0159598588943481, 0.8628590106964111, 0.024517137557268143, 0.8315995335578918, -0.5963507294654846, 0.4470798671245575, -0.422812283039093, 0.9067739844322205, -0.6420065760612488, 0.23916612565517426, -0.27912285923957825, -0.8481767773628235, 0.08495209366083145, 0.7746156454086304, -0.30591949820518494, -0.025449741631746292, 0.6863232851028442, 0.9363610744476318, -0.08612904697656631, 0.22599910199642181, -0.10337348282337189, 0.436734139919281, 
0.2597356140613556, 0.6331084966659546, 0.6271536946296692, -0.7541916966438293, 0.4651432931423187, -0.5905887484550476, -0.5078832507133484, -0.15110358595848083, -0.6255642771720886, -0.7815966606140137, -0.5640495419502258, -0.3584176301956177, -0.603039026260376, -0.03662063181400299, 1.1151877641677856, 0.5043568015098572, -0.8666766285896301, -0.3004143536090851, -0.1381695419549942, 0.1566009521484375, -0.3010943531990051, -0.36944761872291565, 0.5792704224586487, 0.028311725705862045, -0.6854198575019836, 0.3008967638015747, -0.16901274025440216, -0.20226453244686127, -0.08541526645421982, -0.2879094183444977, -0.4081743359565735, -0.2536693513393402, 0.3946182131767273, 0.1598680317401886, -0.6791001558303833, -0.33566492795944214, -0.10490033030509949, -0.0024176202714443207, 0.28747233748435974, 0.39548683166503906, -0.580065906047821, -0.00980337429791689, 0.7021180391311646, 0.14082399010658264, 0.6282386183738708, 0.08335036039352417, 0.20282967388629913, -0.6562974452972412, -0.08740977942943573, 0.01961834356188774, 0.5123668313026428, 0.08284023404121399, -0.45620962977409363, 1.0266720056533813, 0.3151707053184509, -0.7036901116371155, -1.0020086765289307, -0.17653703689575195, -1.1318655014038086, -0.015218047425150871, 1.3919421434402466, -0.4200374484062195, -0.35257798433303833, 0.11763843148946762, -0.2891269028186798, 0.3247147500514984, -0.7836775183677673, 0.48858946561813354, 0.755951464176178, -0.2714157700538635, 0.03243400529026985, -0.678596556186676, 0.23212309181690216, -0.0028884464409202337, -1.0259407758712769, 0.04937109351158142, 0.3626305162906647, 0.44136282801628113, 0.33203691244125366, 0.6324020028114319, -0.0692281574010849, -0.018103618174791336, 0.032733168452978134, 0.11489474773406982, -0.330496221780777, -0.2277536690235138, -0.20491361618041992, 0.2158430516719818, -0.3767085075378418, -0.6275694966316223 ]
open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319
open-llm-leaderboard
2023-10-19T09:37:18Z
200
0
[ "region:us" ]
null
2023-08-18T11:12:27Z
--- pretty_name: Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [OpenAssistant/llama2-13b-orca-8k-3319](https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T09:37:05.639025](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319/blob/main/results_2023-10-19T09-37-05.639025.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.07235738255033557,\n\ \ \"em_stderr\": 0.002653208755575334,\n \"f1\": 0.1714293204697988,\n\ \ \"f1_stderr\": 0.0030613909144533535,\n \"acc\": 0.44091694875395904,\n\ \ \"acc_stderr\": 0.010204605702764508\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.07235738255033557,\n \"em_stderr\": 0.002653208755575334,\n\ \ \"f1\": 0.1714293204697988,\n \"f1_stderr\": 0.0030613909144533535\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10993176648976498,\n \ \ \"acc_stderr\": 0.008616195587865418\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7719021310181531,\n \"acc_stderr\": 0.011793015817663597\n\ \ }\n}\n```" repo_url: https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|arc:challenge|25_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-25T11:12:31.858304.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T09_37_05.639025 path: - '**/details_harness|drop|3_2023-10-19T09-37-05.639025.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T09-37-05.639025.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T09_37_05.639025 path: - '**/details_harness|gsm8k|5_2023-10-19T09-37-05.639025.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T09-37-05.639025.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hellaswag|10_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_5 
data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_25T11_12_31.858304 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_25T11_12_31.858304 path: - '**/details_harness|truthfulqa:mc|0_2023-07-25T11:12:31.858304.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-25T11:12:31.858304.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T09_37_05.639025 path: - '**/details_harness|winogrande|5_2023-10-19T09-37-05.639025.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T09-37-05.639025.parquet' - config_name: results data_files: - split: 2023_07_25T11_12_31.858304 path: - results_2023-07-25T11:12:31.858304.parquet - split: 2023_10_19T09_37_05.639025 path: - results_2023-10-19T09-37-05.639025.parquet - split: latest path: - results_2023-10-19T09-37-05.639025.parquet --- # Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/llama2-13b-orca-8k-3319](https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T09:37:05.639025](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319/blob/main/results_2023-10-19T09-37-05.639025.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.07235738255033557, "em_stderr": 0.002653208755575334, "f1": 0.1714293204697988, "f1_stderr": 0.0030613909144533535, "acc": 0.44091694875395904, "acc_stderr": 0.010204605702764508 }, "harness|drop|3": { "em": 0.07235738255033557, "em_stderr": 0.002653208755575334, "f1": 0.1714293204697988, "f1_stderr": 0.0030613909144533535 }, "harness|gsm8k|5": { "acc": 0.10993176648976498, "acc_stderr": 0.008616195587865418 }, "harness|winogrande|5": { "acc": 0.7719021310181531, "acc_stderr": 0.011793015817663597 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
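The aggregated metrics shown above live in the "results" configuration of this card, again with one timestamped split per run plus a "latest" alias. As a small illustrative sketch (not part of the generated card, and assuming the split names from the YAML header can be passed to `load_dataset` as-is), the two runs of this model could be compared like this:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319"

# One timestamped split per evaluation run, plus "latest" as an alias
# for the most recent run (split names taken from the YAML header above).
first = load_dataset(repo, "results", split="2023_07_25T11_12_31.858304")
latest = load_dataset(repo, "results", split="latest")

# Each split holds the aggregated per-task metrics for that run.
print(first[0])
print(latest[0])
```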
[ -0.34897035360336304, -0.6829118132591248, 0.18219158053398132, 0.17410452663898468, -0.15348376333713531, 0.0941970944404602, -0.32104960083961487, -0.2639542818069458, 0.43410998582839966, 0.5732055902481079, -0.6761260628700256, -1.0091148614883423, -0.6772111654281616, 0.14106443524360657, -0.12398486584424973, 1.112450361251831, -0.36629006266593933, -0.19062264263629913, 0.14216431975364685, -0.35419416427612305, -0.36583849787712097, -0.36614203453063965, -0.5939309000968933, -0.3803269863128662, 0.3730919659137726, 0.6318670511245728, 0.37935206294059753, 0.7517752647399902, 0.699980616569519, 0.33294370770454407, -0.12022065371274948, 0.24384669959545135, -0.483426034450531, -0.10831908881664276, 0.17557862401008606, -0.6206480860710144, -0.7960083484649658, 0.10564682632684708, 0.6283648610115051, 0.4726479947566986, -0.17720992863178253, 0.6695351600646973, 0.1032521054148674, 0.5601769089698792, -0.552169144153595, 0.4623866379261017, -0.31911730766296387, -0.04036916047334671, -0.4725407063961029, -0.13860709965229034, -0.010089103132486343, -0.41678082942962646, -0.2595762312412262, -0.5479541420936584, 0.02811700850725174, 0.12497181445360184, 1.0328117609024048, 0.171831414103508, -0.12949435412883759, -0.2711593210697174, -0.2443227767944336, 0.837547779083252, -0.8785208463668823, -0.07621742784976959, 0.654934823513031, 0.09701002389192581, -0.37439367175102234, -0.5688797831535339, -0.3735557496547699, -0.07737871259450912, -0.18470418453216553, 0.21055491268634796, 0.03641675412654877, -0.17272411286830902, 0.35627973079681396, 0.635673463344574, -0.6769751310348511, 0.11368619650602341, -0.6047329306602478, -0.1210978627204895, 0.9751269221305847, 0.4078930616378784, 0.08750145882368088, -0.41250523924827576, -0.4055106043815613, -0.3734215497970581, -0.5021635890007019, 0.33862805366516113, 0.5461559891700745, 0.49547430872917175, -0.6620559692382812, 0.8920661807060242, -0.42429277300834656, 0.4367731809616089, -0.1668262779712677, -0.3131004869937897, 0.8300858736038208, -0.5402066707611084, -0.1605159044265747, 0.03235175833106041, 1.0335583686828613, 0.42031991481781006, -0.02557685226202011, 0.26614081859588623, -0.2934205234050751, -0.045150354504585266, 0.008596663363277912, -0.7374232411384583, -0.12182305008172989, 0.414153516292572, -0.6549904942512512, -0.408540278673172, 0.3200969696044922, -0.9097465872764587, -0.17044179141521454, -0.20562168955802917, 0.1924315243959427, -0.19617542624473572, -0.3468669652938843, -0.049752864986658096, -0.06287360936403275, 0.30039966106414795, 0.15355184674263, -0.6123040914535522, 0.41141635179519653, 0.5877812504768372, 1.0502772331237793, -0.17607158422470093, -0.3813791573047638, -0.3752079904079437, -0.21430711448192596, -0.21889764070510864, 0.49898195266723633, -0.2338499277830124, -0.4094829559326172, -0.1686844825744629, 0.2789973318576813, -0.2985188663005829, -0.5970602631568909, 0.7014865279197693, -0.28949305415153503, 0.06489351391792297, -0.3119986355304718, -0.26642394065856934, -0.18054157495498657, 0.2730292081832886, -0.676116406917572, 1.4653619527816772, 0.3298549950122833, -0.8128051161766052, 0.10383675247430801, -0.9808158874511719, -0.21117690205574036, -0.049430396407842636, 0.05121554061770439, -0.5640258193016052, -0.18443123996257782, 0.18252545595169067, 0.42510825395584106, -0.35341498255729675, -0.04744958132505417, -0.33058446645736694, -0.32302358746528625, 0.11168862134218216, -0.0034291220363229513, 1.1200679540634155, 0.24173927307128906, -0.41485264897346497, 
0.11957502365112305, -0.9794860482215881, 0.0552084818482399, 0.39321231842041016, -0.6052535176277161, -0.228236585855484, -0.21693307161331177, 0.0733262151479721, 0.10134731978178024, 0.5659259557723999, -0.7463487386703491, 0.3782089948654175, -0.24295248091220856, 0.3998250663280487, 1.0009493827819824, -0.043932050466537476, 0.22909574210643768, -0.3919580280780792, 0.5992525815963745, -0.00791157502681017, 0.31733188033103943, 0.11566460132598877, -0.6130739450454712, -0.7353569269180298, -0.2961246371269226, 0.10697989165782928, 0.6690337657928467, -0.4222318232059479, 0.7009727358818054, -0.34122779965400696, -0.6635058522224426, -0.7526095509529114, 0.09974651038646698, 0.4963832199573517, 0.5597692131996155, 0.3884480595588684, -0.4616033434867859, -0.7053765058517456, -0.9183792471885681, 0.10936377197504044, -0.25056543946266174, 0.06395693123340607, 0.535029947757721, 1.0850186347961426, -0.21404173970222473, 0.7404263615608215, -0.7956013083457947, -0.4210798740386963, -0.22433896362781525, 0.005531845614314079, 0.7229785919189453, 0.51377934217453, 0.46389341354370117, -0.6322231888771057, -0.3327792286872864, -0.024903247132897377, -0.8419373035430908, -0.2727750539779663, -0.025957021862268448, -0.30690887570381165, 0.3306431472301483, 0.04572020843625069, -0.5596151947975159, 0.6207833290100098, 0.5039911270141602, -0.5170559883117676, 0.6917561292648315, 0.007674898486584425, 0.3177622854709625, -1.1238702535629272, 0.28272172808647156, -0.08058059215545654, 0.04752929508686066, -0.35003888607025146, -0.06129277125000954, -0.029168440029025078, 0.29690030217170715, -0.4343007504940033, 0.6677587032318115, -0.3947432339191437, -0.26719629764556885, 0.08785125613212585, 0.13387872278690338, -0.17199192941188812, 0.535810649394989, -0.22070442140102386, 0.8019578456878662, 0.5139468312263489, -0.30298754572868347, 0.3927287757396698, 0.5563682913780212, -0.5836086869239807, 0.23026007413864136, -0.4991549551486969, 0.04889962077140808, 0.15866057574748993, 0.2866913974285126, -1.0042331218719482, -0.4086659550666809, 0.509088933467865, -0.5237067937850952, 0.13750018179416656, -0.19873301684856415, -0.616716206073761, -0.4936334788799286, -0.5840145945549011, 0.289879709482193, 0.3929857611656189, -0.5546318888664246, 0.2868879437446594, 0.40569204092025757, -0.04415768384933472, -0.6606261134147644, -0.7466256022453308, -0.18028903007507324, -0.3266281187534332, -0.6770567893981934, 0.34298908710479736, -0.130671426653862, -0.25655466318130493, 0.004962131381034851, -0.06992323696613312, 0.01059907115995884, 0.15578332543373108, 0.44230565428733826, 0.5487931370735168, -0.16287976503372192, -0.35906681418418884, -0.17721866071224213, -0.15541043877601624, 0.19156500697135925, 0.13307608664035797, 0.6231584548950195, -0.24653536081314087, -0.3477100133895874, -0.17362987995147705, 0.13769902288913727, 0.45481353998184204, -0.27832311391830444, 0.9082559943199158, 0.6245055198669434, -0.17091304063796997, -0.04132263734936714, -0.33611977100372314, 0.00736162019893527, -0.4753224551677704, 0.3334442377090454, -0.35799136757850647, -0.7757312059402466, 0.9237638115882874, 0.22624088823795319, 0.2676560580730438, 0.6771034002304077, 0.5989970564842224, 0.15711426734924316, 0.7701341509819031, 0.2678501009941101, -0.17958804965019226, 0.49816641211509705, -0.7461251020431519, -0.009007132612168789, -1.1608903408050537, -0.5277085304260254, -0.5025447010993958, -0.4434747099876404, -0.7459787130355835, -0.2446572184562683, 0.3027467131614685, 0.18123182654380798, 
-0.44546806812286377, 0.5569958686828613, -0.616777241230011, 0.20362427830696106, 0.6692847609519958, 0.2712448239326477, 0.08267276734113693, -0.1024450734257698, -0.11867371946573257, 0.3426940441131592, -0.4278523623943329, -0.48126983642578125, 1.447898268699646, 0.278349906206131, 0.6687476634979248, 0.08207952231168747, 0.9033863544464111, 0.3087027072906494, 0.2787111699581146, -0.49122101068496704, 0.6616484522819519, -0.02088199555873871, -0.49073830246925354, -0.21006949245929718, -0.5391600131988525, -1.0135418176651, 0.148655965924263, 0.08470775187015533, -1.041504144668579, 0.1108226403594017, -0.08186570554971695, -0.12467505037784576, 0.3811049163341522, -0.5381074547767639, 0.8176937699317932, -0.1823674589395523, -0.2577038109302521, 0.04252001643180847, -0.9140145778656006, 0.5150107145309448, 0.011537124402821064, 0.3080507218837738, -0.24123381078243256, -0.111823171377182, 1.1174930334091187, -0.7315717935562134, 0.7582061886787415, -0.13555312156677246, 0.046918999403715134, 0.40375787019729614, -0.3298141062259674, 0.5763041377067566, -0.058255814015865326, -0.29629653692245483, 0.5240210890769958, -0.2028588503599167, -0.2785840928554535, -0.20853298902511597, 0.8873780369758606, -0.9489870667457581, -0.3010912537574768, -0.40312156081199646, -0.5883325338363647, 0.31101667881011963, 0.2280445098876953, 0.3384709656238556, 0.24964506924152374, 0.09079805016517639, 0.1818762570619583, 0.24409501254558563, -0.20654787123203278, 0.5284774899482727, 0.4675656855106354, -0.16156305372714996, -0.7157902121543884, 0.6656283736228943, 0.2943950593471527, 0.11326006799936295, 0.23793095350265503, 0.021087275817990303, -0.49695461988449097, -0.4477896988391876, -0.38939860463142395, 0.32137221097946167, -0.562525749206543, -0.38645151257514954, -0.41476690769195557, -0.2602309584617615, -0.4163688123226166, 0.007944999262690544, -0.39687252044677734, -0.4815284013748169, -0.5065178871154785, -0.2888824939727783, 0.6591407656669617, 0.6207414269447327, -0.45234042406082153, 0.26527270674705505, -0.6200048923492432, 0.23182635009288788, -0.24780133366584778, 0.3971896469593048, -0.03975503146648407, -0.6080338358879089, -0.3770306706428528, 0.09588251262903214, -0.4939652383327484, -0.862832248210907, 0.5450602769851685, -0.01265944354236126, 0.6334612369537354, 0.20439735054969788, 0.22428639233112335, 0.758769690990448, -0.16705992817878723, 1.0712361335754395, -0.044229380786418915, -0.7076064944267273, 0.7945097088813782, -0.24881643056869507, 0.09489321708679199, 0.4746673107147217, 0.21973483264446259, -0.3451146185398102, -0.3010290265083313, -0.8128625750541687, -1.1994357109069824, 1.1946605443954468, 0.5771870017051697, -0.37930864095687866, 0.06946856528520584, 0.33907613158226013, -0.004660434555262327, 0.15973898768424988, -0.6653965711593628, -0.7785830497741699, -0.11002222448587418, -0.23336105048656464, -0.0695360004901886, -0.010820089839398861, -0.36488184332847595, -0.26748189330101013, 0.8979510068893433, 0.011215479113161564, 0.4664039611816406, 0.16929152607917786, -0.010003013536334038, -0.13798588514328003, 0.1912030726671219, 0.5355521440505981, 0.6368691325187683, -0.4573761522769928, -0.08043448626995087, 0.3030926585197449, -0.696882426738739, 0.01619638502597809, 0.28545573353767395, -0.035362329334020615, -0.16979268193244934, 0.6215445399284363, 0.8644579648971558, -0.05499072000384331, -0.4966186285018921, 0.4650605022907257, 0.12278421223163605, -0.25961068272590637, -0.44491833448410034, 0.12416787445545197, -0.12395283579826355, 
0.3918457627296448, 0.3952324688434601, -0.07622549682855606, -0.0006337818340398371, -0.3858119249343872, 0.13821814954280853, 0.2764051854610443, -0.008744582533836365, -0.3355954885482788, 0.6448752880096436, -0.09148149937391281, -0.35097575187683105, 0.7366986274719238, -0.04139337316155434, -0.5323590636253357, 1.1610382795333862, 0.2702479064464569, 0.8724020719528198, -0.17759528756141663, 0.04380353167653084, 0.5513268709182739, 0.37106066942214966, -0.13365882635116577, 0.671719491481781, 0.0798892080783844, -0.5610156655311584, -0.2110850214958191, -0.7881406545639038, -0.21566642820835114, 0.4034233093261719, -1.1095908880233765, 0.37370404601097107, -0.2112520933151245, -0.2629624307155609, -0.12957394123077393, 0.38477471470832825, -0.7600394487380981, 0.09129522740840912, -0.008485193364322186, 1.0176632404327393, -1.064727783203125, 0.6200041770935059, 0.8405927419662476, -0.5681425333023071, -0.9603428244590759, -0.3549080789089203, 0.09958960115909576, -0.8977838754653931, 0.5771306157112122, 0.2723579406738281, 0.3135679066181183, -0.22194312512874603, -0.6830437183380127, -1.098731517791748, 1.5425492525100708, 0.19749228656291962, -0.4362422823905945, 0.2495887577533722, 0.2195061445236206, 0.3521742820739746, -0.3787752687931061, 0.627209484577179, 0.7679405212402344, 0.7413432002067566, -0.10101517289876938, -0.9946742057800293, 0.3446275293827057, -0.4668790102005005, -0.08148252964019775, 0.3363823890686035, -0.9874165058135986, 1.0924761295318604, -0.2140856683254242, 0.011164689436554909, 0.17083673179149628, 0.4115172326564789, 0.5853580832481384, 0.34898698329925537, 0.40103238821029663, 0.7334152460098267, 0.641502857208252, -0.3195880055427551, 1.0137144327163696, -0.2232731282711029, 0.7779507040977478, 1.0621618032455444, -0.045053042471408844, 0.8468760251998901, 0.34923046827316284, -0.4492124319076538, 0.5833781957626343, 0.8891578912734985, -0.3231299817562103, 0.5397675633430481, 0.13382503390312195, -0.02494450844824314, 0.03957682475447655, -0.06862915307283401, -0.5187984108924866, 0.43986356258392334, 0.20181679725646973, -0.616231381893158, -0.2537495493888855, -0.2433510571718216, 0.11587750166654587, -0.3444793224334717, -0.26492124795913696, 0.5907381176948547, 0.02651512436568737, -0.5111303329467773, 0.7261079549789429, 0.0019513493170961738, 0.5633350014686584, -0.6938137412071228, -0.1631053239107132, -0.298191100358963, 0.2568119168281555, -0.5735700130462646, -1.0635693073272705, 0.17965121567249298, 0.1242193654179573, -0.22894051671028137, -0.20185215771198273, 0.5420262217521667, -0.2030724436044693, -0.4344370365142822, 0.5020686984062195, 0.2706044018268585, 0.46257513761520386, 0.18488042056560516, -0.8462002277374268, 0.26467689871788025, 0.2890649139881134, -0.8019986152648926, 0.45318785309791565, 0.27380573749542236, -0.034458644688129425, 0.5924312472343445, 0.7824708819389343, 0.15105053782463074, 0.0969151183962822, -0.12661099433898926, 1.2034038305282593, -0.6786780953407288, -0.3734712302684784, -0.808678925037384, 0.8239217400550842, -0.1568126231431961, -0.6991606950759888, 0.827468752861023, 1.0087288618087769, 0.8662708401679993, 0.0912100300192833, 0.7528800964355469, -0.5150659680366516, 0.4900361895561218, -0.43512630462646484, 0.8126761317253113, -0.699221670627594, 0.38693565130233765, -0.15795831382274628, -0.9119048714637756, -0.001184999942779541, 0.6800652146339417, -0.2791447937488556, -0.04576781764626503, 0.5547685027122498, 1.0679384469985962, 0.014955881983041763, 0.11527778208255768, 
-0.13623954355716705, 0.4579288959503174, 0.3012312650680542, 0.6691972613334656, 0.6038972735404968, -0.6222485303878784, 0.516448974609375, -0.5469844341278076, -0.5050445199012756, -0.17952829599380493, -0.6742918491363525, -0.8227601051330566, -0.45138081908226013, -0.2797197103500366, -0.5262005925178528, 0.04824089631438255, 0.988166332244873, 0.4903297424316406, -0.888271152973175, -0.4643992483615875, -0.03688839450478554, 0.1669597029685974, -0.24542008340358734, -0.33792853355407715, 0.6739067435264587, -0.04125820845365524, -0.6987878084182739, 0.3595939874649048, -0.09349904209375381, -0.20981276035308838, -0.06282532960176468, -0.2429923713207245, -0.30592843890190125, -0.21383249759674072, 0.42015451192855835, 0.32296112179756165, -0.7187588214874268, -0.2809704542160034, -0.18969416618347168, -0.04246513918042183, 0.29556483030319214, 0.24764202535152435, -0.5919401049613953, 0.050492916256189346, 0.5693418383598328, 0.16391485929489136, 0.6497114300727844, 0.10928796976804733, 0.17585286498069763, -0.6761564612388611, -0.027969814836978912, -0.03144371509552002, 0.4862254559993744, 0.10306624323129654, -0.4267960488796234, 1.0634510517120361, 0.2724120318889618, -0.6936721801757812, -0.9611482620239258, -0.19162346422672272, -1.2294652462005615, 0.02777804620563984, 1.4150182008743286, -0.4022141993045807, -0.36481696367263794, 0.09691855311393738, -0.22489196062088013, 0.2559826374053955, -0.8475596904754639, 0.5750139355659485, 0.6311666965484619, -0.3430210053920746, 0.058989446610212326, -0.6232814192771912, 0.21954545378684998, 0.029324917122721672, -1.015486478805542, -0.08061113208532333, 0.23968397080898285, 0.35829856991767883, 0.26982060074806213, 0.5598922371864319, -0.05525956302881241, -0.10989201068878174, -0.007444652263075113, 0.2087007761001587, -0.3095250427722931, -0.10259175300598145, -0.17474506795406342, 0.12374138832092285, -0.3925429582595825, -0.6158643364906311 ]
open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5
open-llm-leaderboard
2023-10-22T03:00:47Z
200
0
[ "region:us" ]
null
2023-08-18T11:12:36Z
--- pretty_name: Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5](https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T03:00:35.046242](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5/blob/main/results_2023-10-22T03-00-35.046242.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n\ \ \"em_stderr\": 0.00037786091964606887,\n \"f1\": 0.059077181208053976,\n\ \ \"f1_stderr\": 0.001394848925611238,\n \"acc\": 0.3446815500250423,\n\ \ \"acc_stderr\": 0.009023084450724785\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.001363255033557047,\n \"em_stderr\": 0.00037786091964606887,\n\ \ \"f1\": 0.059077181208053976,\n \"f1_stderr\": 0.001394848925611238\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.030326004548900682,\n \ \ \"acc_stderr\": 0.004723487465514761\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.659037095501184,\n \"acc_stderr\": 0.013322681435934807\n\ \ }\n}\n```" repo_url: https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|arc:challenge|25_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T18:18:17.138849.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T03_00_35.046242 path: - '**/details_harness|drop|3_2023-10-22T03-00-35.046242.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T03-00-35.046242.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T03_00_35.046242 path: - '**/details_harness|gsm8k|5_2023-10-22T03-00-35.046242.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T03-00-35.046242.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hellaswag|10_2023-07-19T18:18:17.138849.parquet' - split: latest path: - 
'**/details_harness|hellaswag|10_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet' - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet' - 
'**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet' - 
config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T18_18_17.138849 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:18:17.138849.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:18:17.138849.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T03_00_35.046242 path: - '**/details_harness|winogrande|5_2023-10-22T03-00-35.046242.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T03-00-35.046242.parquet' - config_name: results data_files: - split: 2023_07_19T18_18_17.138849 path: - results_2023-07-19T18:18:17.138849.parquet - split: 2023_10_22T03_00_35.046242 path: - results_2023-10-22T03-00-35.046242.parquet - split: latest path: - results_2023-10-22T03-00-35.046242.parquet ---

# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5](https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-22T03:00:35.046242](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5/blob/main/results_2023-10-22T03-00-35.046242.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.001363255033557047,
        "em_stderr": 0.00037786091964606887,
        "f1": 0.059077181208053976,
        "f1_stderr": 0.001394848925611238,
        "acc": 0.3446815500250423,
        "acc_stderr": 0.009023084450724785
    },
    "harness|drop|3": {
        "em": 0.001363255033557047,
        "em_stderr": 0.00037786091964606887,
        "f1": 0.059077181208053976,
        "f1_stderr": 0.001394848925611238
    },
    "harness|gsm8k|5": {
        "acc": 0.030326004548900682,
        "acc_stderr": 0.004723487465514761
    },
    "harness|winogrande|5": {
        "acc": 0.659037095501184,
        "acc_stderr": 0.013322681435934807
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
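The same aggregation pattern holds here: the "all" accuracy (0.3446815500250423) works out to the unweighted mean of the gsm8k and winogrande accuracies above. To pull the underlying per-task details, the loading pattern from the Dataset Summary extends to the other configs declared in the YAML header; the sketch below assumes the splits listed under `data_files` ("latest" and the timestamped run names) are exposed exactly as declared.

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5"

# Per-task details for the most recent run of each config
# ("latest" is declared for every config in the card's YAML header).
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")
drop_details = load_dataset(repo, "harness_drop_3", split="latest")

# A specific run can also be selected via its timestamped split name.
winogrande_run = load_dataset(repo, "harness_winogrande_5",
                              split="2023_10_22T03_00_35.046242")

print(gsm8k_details)
print(drop_details)
print(winogrande_run)
```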
[ -0.36525386571884155, -0.7363730072975159, 0.2600392997264862, 0.12750153243541718, -0.1346568614244461, 0.07665041089057922, -0.3673919141292572, -0.1790655553340912, 0.31776198744773865, 0.4787713289260864, -0.6093949675559998, -0.9091064929962158, -0.5987065434455872, 0.11872641742229462, -0.06686793267726898, 1.1270992755889893, -0.4215376079082489, -0.16834011673927307, 0.13371631503105164, -0.2677444815635681, -0.31620872020721436, -0.4047512114048004, -0.441279798746109, -0.34628814458847046, 0.32403111457824707, 0.6054909229278564, 0.42370137572288513, 0.6826756596565247, 0.6901761293411255, 0.36667078733444214, -0.08973535895347595, 0.1372859627008438, -0.4938383400440216, -0.1430560201406479, 0.18702039122581482, -0.5781821608543396, -0.7703147530555725, 0.17023871839046478, 0.7478366494178772, 0.4740227162837982, -0.13094569742679596, 0.6414796710014343, 0.05663618817925453, 0.5522996783256531, -0.5251394510269165, 0.4085076153278351, -0.3017093241214752, 0.00022132013691589236, -0.3812311589717865, -0.18976953625679016, -0.050883885473012924, -0.4396105110645294, -0.07130628079175949, -0.5332176089286804, 0.158920019865036, 0.17704662680625916, 1.0948119163513184, 0.0707039013504982, -0.08280466496944427, -0.10989343374967575, -0.42478758096694946, 0.8286821246147156, -0.8462209105491638, 0.03173964470624924, 0.6392137408256531, 0.12536364793777466, -0.22392109036445618, -0.6422329545021057, -0.3624180853366852, -0.06576063483953476, -0.21709097921848297, 0.15580005943775177, -0.04359464719891548, -0.15607565641403198, 0.3931647539138794, 0.7992793917655945, -0.6880098581314087, 0.011291101574897766, -0.6073051691055298, -0.17746572196483612, 0.8553996086120605, 0.3350715935230255, 0.07102683186531067, -0.4336949288845062, -0.45400652289390564, -0.29989251494407654, -0.4703504741191864, 0.21092216670513153, 0.5980704426765442, 0.4584856629371643, -0.6111676096916199, 0.8274579048156738, -0.5125633478164673, 0.45968878269195557, -0.12813116610050201, -0.1610090285539627, 0.8694769144058228, -0.5487313866615295, -0.1877235323190689, 0.003103879513218999, 1.1419035196304321, 0.38790568709373474, -0.04775209352374077, 0.141214519739151, -0.2254810780286789, -0.13098207116127014, 0.04157206416130066, -0.9023817181587219, -0.2658262848854065, 0.4005933701992035, -0.6467442512512207, -0.3674290180206299, 0.36867761611938477, -0.95233154296875, -0.08364702016115189, -0.198879212141037, 0.3195628225803375, -0.3207983374595642, -0.42440980672836304, -0.13519278168678284, -0.12599658966064453, 0.17260582745075226, 0.1343049705028534, -0.5941160917282104, 0.36730703711509705, 0.5505011677742004, 1.1267281770706177, -0.11520611494779587, -0.4057430028915405, -0.3611108064651489, -0.2907804250717163, -0.155059352517128, 0.3317607641220093, -0.12184518575668335, -0.28960809111595154, -0.24907425045967102, 0.30332598090171814, -0.2765481472015381, -0.4973166584968567, 0.6582063436508179, -0.32654309272766113, 0.1537935882806778, -0.21871063113212585, -0.34246501326560974, -0.16085544228553772, 0.2753942012786865, -0.6788071990013123, 1.3945273160934448, 0.42267563939094543, -0.844467043876648, 0.11893283575773239, -0.8868648409843445, -0.10218315571546555, 0.06539717316627502, 0.0765787661075592, -0.5701677203178406, -0.14458513259887695, 0.1804026961326599, 0.44708916544914246, -0.3576744794845581, -0.02173401415348053, -0.27768662571907043, -0.40985745191574097, 0.12797757983207703, -0.1544552892446518, 1.020910382270813, 0.24905216693878174, -0.47270527482032776, 0.16738519072532654, 
-0.9094069004058838, 0.12725311517715454, 0.36693596839904785, -0.5116944313049316, -0.2138805240392685, -0.2003505378961563, 0.0603959821164608, 0.08582332730293274, 0.5071426630020142, -0.6967974901199341, 0.3259163796901703, -0.25673049688339233, 0.46403875946998596, 1.0449272394180298, 0.011041911318898201, 0.2998378872871399, -0.39108890295028687, 0.5001945495605469, 0.05826588720083237, 0.3168853223323822, 0.010160854086279869, -0.6188837885856628, -0.6574520468711853, -0.2847342789173126, 0.1633722186088562, 0.6138532757759094, -0.48529380559921265, 0.7357041835784912, -0.4194709062576294, -0.7021005749702454, -0.7425193786621094, -0.01619085855782032, 0.4459078013896942, 0.5572638511657715, 0.45342758297920227, -0.3137790560722351, -0.6339817643165588, -0.888819694519043, 0.03172203153371811, -0.282757043838501, 0.05863654240965843, 0.48806414008140564, 1.1051641702651978, -0.16664338111877441, 0.7515273690223694, -0.8544510006904602, -0.29644811153411865, -0.09747057408094406, 0.07273992151021957, 0.714928925037384, 0.532669723033905, 0.4324208199977875, -0.6861218214035034, -0.3148895502090454, 0.0019558637868613005, -0.7792115211486816, -0.13324373960494995, -0.024141594767570496, -0.30217233300209045, 0.3413558900356293, -0.017340006306767464, -0.5339553952217102, 0.5876765847206116, 0.4682675898075104, -0.6361814141273499, 0.707796573638916, -0.0149423498660326, 0.5185387134552002, -1.1355475187301636, 0.24568866193294525, -0.0005812064628116786, 0.04360291734337807, -0.38948601484298706, -0.01473907195031643, 0.02567443437874317, 0.276856392621994, -0.4183296263217926, 0.6013930439949036, -0.43527162075042725, -0.17769834399223328, 0.15237782895565033, 0.12189391255378723, -0.21071133017539978, 0.5688624382019043, -0.2247532308101654, 0.7658366560935974, 0.510086178779602, -0.3280891478061676, 0.42221763730049133, 0.5091469287872314, -0.5656716823577881, 0.26914453506469727, -0.4501844346523285, 0.014891828410327435, 0.22374655306339264, 0.19097857177257538, -0.9008775353431702, -0.3621865212917328, 0.5416695475578308, -0.6419723629951477, 0.05677536129951477, -0.3447953760623932, -0.5924217700958252, -0.5201965570449829, -0.525390088558197, 0.3032708168029785, 0.5129683613777161, -0.5643061995506287, 0.296889066696167, 0.25628867745399475, 0.017877478152513504, -0.5634433031082153, -0.710547149181366, -0.3001824915409088, -0.3275794982910156, -0.7309842705726624, 0.41760551929473877, -0.13974465429782867, -0.23192192614078522, 0.10079494118690491, -0.10286958515644073, -0.007777965161949396, 0.139333114027977, 0.3987859785556793, 0.6050311326980591, -0.2768462300300598, -0.3933931589126587, -0.29148322343826294, -0.14829733967781067, 0.13493183255195618, -0.02352926693856716, 0.6733201146125793, -0.3480586111545563, -0.30750223994255066, -0.28364357352256775, 0.0762624517083168, 0.4865261912345886, -0.268806517124176, 0.9491549134254456, 0.7002893090248108, -0.2570219337940216, 0.03496186062693596, -0.298604816198349, -0.04159863293170929, -0.47698843479156494, 0.3174419403076172, -0.34646087884902954, -0.8069502115249634, 0.8584060072898865, 0.2148311883211136, 0.19770728051662445, 0.7411158680915833, 0.5788627862930298, 0.1920991837978363, 0.8051015138626099, 0.10800524055957794, -0.143774151802063, 0.560234546661377, -0.7721148729324341, 0.08347161114215851, -1.153960943222046, -0.40836548805236816, -0.4891643524169922, -0.41279417276382446, -0.8266444206237793, -0.3475266993045807, 0.26915860176086426, 0.2801680862903595, -0.545439600944519, 0.48776504397392273, 
-0.6524019837379456, 0.17937473952770233, 0.703526496887207, 0.2641961872577667, 0.0662902295589447, -0.05322279408574104, -0.03485443443059921, 0.22346992790699005, -0.400010347366333, -0.38911205530166626, 1.3911912441253662, 0.30536186695098877, 0.6159963011741638, 0.0848272517323494, 0.9350775480270386, 0.1864916831254959, 0.2537977397441864, -0.5170114636421204, 0.588100790977478, 0.10007119923830032, -0.5094295144081116, -0.2292865663766861, -0.7310359477996826, -0.9055917859077454, 0.2238156497478485, 0.004717536270618439, -1.0602803230285645, 0.016751015558838844, -0.033278629183769226, -0.15120741724967957, 0.451913058757782, -0.620614767074585, 0.9063373804092407, -0.2033935934305191, -0.41779690980911255, 0.10193154215812683, -0.8284944891929626, 0.45129862427711487, 0.11817889660596848, 0.3187878131866455, -0.19436335563659668, 0.036230072379112244, 1.0813188552856445, -0.7306422591209412, 0.6365345120429993, -0.20424823462963104, 0.17241939902305603, 0.38142478466033936, -0.31807106733322144, 0.561277449131012, -0.009838215075433254, -0.23683595657348633, 0.510076642036438, -0.16575227677822113, -0.30312594771385193, -0.23077930510044098, 0.991367518901825, -1.0028018951416016, -0.2820599377155304, -0.5374037027359009, -0.5403695702552795, 0.29141348600387573, 0.31126558780670166, 0.37721121311187744, 0.27895835041999817, 0.03605316951870918, 0.2549951672554016, 0.2811814248561859, -0.11524874716997147, 0.5324910283088684, 0.3953301012516022, -0.19229546189308167, -0.7889485359191895, 0.782123863697052, 0.27932366728782654, 0.19615943729877472, 0.15536320209503174, 0.15467029809951782, -0.4624261260032654, -0.5003824830055237, -0.46532320976257324, 0.24683330953121185, -0.5205590724945068, -0.3694017231464386, -0.41932716965675354, -0.24562598764896393, -0.4667004346847534, -0.026907989755272865, -0.3613104224205017, -0.43846094608306885, -0.46829015016555786, -0.23916110396385193, 0.7303016781806946, 0.5043519139289856, -0.3076472580432892, 0.33772018551826477, -0.7820850610733032, 0.2827645242214203, -0.29668566584587097, 0.5095492601394653, -0.06644947081804276, -0.5369020104408264, -0.372313916683197, 0.13476376235485077, -0.5045484900474548, -0.9199907183647156, 0.48077401518821716, -0.0376267209649086, 0.631406843662262, 0.16755688190460205, 0.18921053409576416, 0.732654869556427, -0.221265509724617, 1.1439388990402222, -0.005947528872638941, -0.7528243660926819, 0.7657211422920227, -0.3763848543167114, 0.11246099323034286, 0.47899314761161804, 0.2828345000743866, -0.3919026553630829, -0.22022750973701477, -0.9289756417274475, -1.2646901607513428, 1.2429051399230957, 0.5994062423706055, -0.42021897435188293, 0.10209766775369644, 0.3014620244503021, -0.07243920862674713, 0.1895187795162201, -0.6979488730430603, -0.7584706544876099, -0.1669030636548996, -0.2775259017944336, -0.023010235279798508, 0.12348871678113937, -0.373142808675766, -0.3409585952758789, 0.9362779855728149, -0.016401950269937515, 0.5124855637550354, 0.2165086269378662, -0.042571038007736206, -0.15737521648406982, 0.182028666138649, 0.3804382085800171, 0.6289524435997009, -0.44674718379974365, -0.1073102280497551, 0.22517208755016327, -0.7093995809555054, 0.014354645274579525, 0.3767892122268677, -0.10306424647569656, -0.18683482706546783, 0.5379564166069031, 1.0068411827087402, 0.039676565676927567, -0.3579091727733612, 0.5056972503662109, 0.09113985300064087, -0.297762393951416, -0.4568347930908203, 0.08841320127248764, -0.1173175498843193, 0.37984901666641235, 0.4503566324710846, 0.004322630353271961, 
-0.024780502542853355, -0.4335233271121979, 0.17684708535671234, 0.21492592990398407, -0.034640032798051834, -0.33852618932724, 0.6252956986427307, -0.06786876171827316, -0.3678269684314728, 0.8337993025779724, -0.09338636696338654, -0.570534348487854, 1.004134178161621, 0.32367101311683655, 0.8812251687049866, -0.19522668421268463, 0.06689201295375824, 0.6372288465499878, 0.34236574172973633, -0.213814377784729, 0.5981388688087463, 0.10003973543643951, -0.4904257655143738, -0.16974368691444397, -0.8275130391120911, -0.14102335274219513, 0.3469351828098297, -1.0644383430480957, 0.3902624249458313, -0.21076294779777527, -0.19860835373401642, -0.0971747413277626, 0.33654507994651794, -0.8045863509178162, 0.1444213092327118, -0.10356510430574417, 0.9006312489509583, -1.0199685096740723, 0.5219300985336304, 0.7800309658050537, -0.6081104874610901, -0.9423324465751648, -0.20982585847377777, 0.06929338723421097, -0.7447713017463684, 0.4103165566921234, 0.25275251269340515, 0.3808722496032715, -0.10848919302225113, -0.6033194065093994, -1.0596801042556763, 1.5792101621627808, 0.1086847260594368, -0.45603033900260925, 0.10646932572126389, 0.08135195821523666, 0.3426720201969147, -0.27744367718696594, 0.6375178694725037, 0.7440472841262817, 0.7353066802024841, -0.019824223592877388, -0.9251601099967957, 0.3699061870574951, -0.4238296151161194, -0.16712163388729095, 0.5157290697097778, -0.8859763741493225, 1.1652135848999023, -0.21063406765460968, 0.001583896460942924, 0.14046768844127655, 0.44460272789001465, 0.5043950080871582, 0.30839866399765015, 0.39631199836730957, 0.734870433807373, 0.642304539680481, -0.5115006566047668, 1.000317931175232, -0.22421175241470337, 0.8479956388473511, 1.0534396171569824, 0.02491047792136669, 0.8390675187110901, 0.36416664719581604, -0.45724257826805115, 0.5260983109474182, 0.8599034547805786, -0.38641518354415894, 0.5016135573387146, 0.12290747463703156, -0.04354849085211754, 0.07056038081645966, 0.13977476954460144, -0.5298410058021545, 0.3635489344596863, 0.16059817373752594, -0.6217315196990967, -0.167489692568779, -0.2780763506889343, 0.12317337095737457, -0.3183658719062805, -0.28324562311172485, 0.5457388758659363, -0.07045922428369522, -0.5799248814582825, 0.6517268419265747, 0.007406429387629032, 0.6189413666725159, -0.6605750918388367, -0.1659843772649765, -0.10596934705972672, 0.32404452562332153, -0.5772501826286316, -1.0061240196228027, 0.14799341559410095, 0.07918570935726166, -0.1936561018228531, -0.17499227821826935, 0.5196330547332764, -0.25451910495758057, -0.4468003809452057, 0.38436201214790344, 0.28384092450141907, 0.36783501505851746, 0.05530437082052231, -0.7573257088661194, 0.20686355233192444, 0.24687980115413666, -0.7956399917602539, 0.39205196499824524, 0.29349014163017273, -0.03556814789772034, 0.4260859191417694, 0.8283988237380981, 0.15772558748722076, 0.13314653933048248, -0.17319786548614502, 1.1206250190734863, -0.7440207600593567, -0.45574042201042175, -0.8535997271537781, 0.8964206576347351, -0.23108728229999542, -0.7551904320716858, 0.841265082359314, 1.0052697658538818, 0.8893371820449829, 0.0856589674949646, 0.7890800833702087, -0.575282871723175, 0.43444186449050903, -0.4294431507587433, 0.8506547808647156, -0.6356562972068787, 0.26398375630378723, -0.23256735503673553, -0.8930818438529968, 0.059649284929037094, 0.7074876427650452, -0.3136269450187683, 0.022411394864320755, 0.6919394135475159, 0.8856103420257568, -0.04115195944905281, 0.2695631682872772, -0.13797961175441742, 0.43754637241363525, 0.3217935562133789, 
0.6048628091812134, 0.6380244493484497, -0.7183215618133545, 0.4641116261482239, -0.5853785276412964, -0.54292893409729, -0.18407604098320007, -0.6489458084106445, -0.7601318359375, -0.5704474449157715, -0.29640403389930725, -0.5834821462631226, -0.055184781551361084, 1.018784523010254, 0.50091552734375, -0.795026957988739, -0.28579801321029663, -0.0689111277461052, 0.11499118059873581, -0.2896718978881836, -0.36634543538093567, 0.6146844625473022, -0.0014850746374577284, -0.743506133556366, 0.31617680191993713, -0.1527063548564911, -0.17514660954475403, -0.0663638710975647, -0.2723219096660614, -0.35628482699394226, -0.29691368341445923, 0.38354310393333435, 0.16808316111564636, -0.6865670084953308, -0.38341084122657776, -0.10389433801174164, 0.04621925950050354, 0.30150705575942993, 0.3770211637020111, -0.5649662613868713, 0.0663989707827568, 0.6913968920707703, 0.15367919206619263, 0.6737136244773865, 0.07145954668521881, 0.23326817154884338, -0.6594870090484619, -0.04389799386262894, 0.06276856362819672, 0.49384528398513794, 0.1535918116569519, -0.4802270531654358, 0.9834837913513184, 0.36203110218048096, -0.7667892575263977, -0.988886833190918, -0.2137802690267563, -1.0979317426681519, 0.0181058831512928, 1.368747353553772, -0.40713220834732056, -0.3662618398666382, 0.12276992201805115, -0.215457946062088, 0.324592649936676, -0.7894309163093567, 0.4835430383682251, 0.7190971374511719, -0.3103829324245453, 0.058278538286685944, -0.6937236785888672, 0.2703530788421631, -0.013095381669700146, -0.9907238483428955, 0.008240459486842155, 0.37197861075401306, 0.4231545329093933, 0.34721580147743225, 0.6153597235679626, -0.1348692774772644, -0.05898924916982651, 0.0191811453551054, 0.1764460802078247, -0.3027283251285553, -0.17739643156528473, -0.19762413203716278, 0.17543675005435944, -0.35378122329711914, -0.6208849549293518 ]
open-llm-leaderboard/details_OpenAssistant__pythia-12b-sft-v8-2.5k-steps
open-llm-leaderboard
2023-10-19T06:06:05Z
200
0
[ "region:us" ]
null
2023-08-18T11:12:53Z
--- pretty_name: Evaluation run of OpenAssistant/pythia-12b-sft-v8-2.5k-steps dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [OpenAssistant/pythia-12b-sft-v8-2.5k-steps](https://huggingface.co/OpenAssistant/pythia-12b-sft-v8-2.5k-steps)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__pythia-12b-sft-v8-2.5k-steps\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T06:05:53.274569](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__pythia-12b-sft-v8-2.5k-steps/blob/main/results_2023-10-19T06-05-53.274569.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0008389261744966443,\n\ \ \"em_stderr\": 0.0002964962989801232,\n \"f1\": 0.04918729026845652,\n\ \ \"f1_stderr\": 0.0011958323498480873,\n \"acc\": 0.3760981059411563,\n\ \ \"acc_stderr\": 0.010720714478256874\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0008389261744966443,\n \"em_stderr\": 0.0002964962989801232,\n\ \ \"f1\": 0.04918729026845652,\n \"f1_stderr\": 0.0011958323498480873\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09552691432903715,\n \ \ \"acc_stderr\": 0.008096605771155738\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6566692975532754,\n \"acc_stderr\": 0.013344823185358009\n\ \ }\n}\n```" repo_url: https://huggingface.co/OpenAssistant/pythia-12b-sft-v8-2.5k-steps leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|arc:challenge|25_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T18:14:20.845496.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T06_05_53.274569 path: - '**/details_harness|drop|3_2023-10-19T06-05-53.274569.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T06-05-53.274569.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T06_05_53.274569 path: - '**/details_harness|gsm8k|5_2023-10-19T06-05-53.274569.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T06-05-53.274569.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hellaswag|10_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T18:14:20.845496.parquet' - 
config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:14:20.845496.parquet' - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:14:20.845496.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:14:20.845496.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:14:20.845496.parquet' - 
'**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:14:20.845496.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:14:20.845496.parquet' - 
config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:14:20.845496.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:14:20.845496.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T18_14_20.845496 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:14:20.845496.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:14:20.845496.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T06_05_53.274569 path: - '**/details_harness|winogrande|5_2023-10-19T06-05-53.274569.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T06-05-53.274569.parquet' - config_name: results data_files: - split: 2023_07_19T18_14_20.845496 path: - results_2023-07-19T18:14:20.845496.parquet - split: 2023_10_19T06_05_53.274569 path: - results_2023-10-19T06-05-53.274569.parquet - split: latest path: - results_2023-10-19T06-05-53.274569.parquet --- # Dataset Card for Evaluation run of OpenAssistant/pythia-12b-sft-v8-2.5k-steps ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/pythia-12b-sft-v8-2.5k-steps - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/pythia-12b-sft-v8-2.5k-steps](https://huggingface.co/OpenAssistant/pythia-12b-sft-v8-2.5k-steps) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__pythia-12b-sft-v8-2.5k-steps", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T06:05:53.274569](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__pythia-12b-sft-v8-2.5k-steps/blob/main/results_2023-10-19T06-05-53.274569.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0008389261744966443, "em_stderr": 0.0002964962989801232, "f1": 0.04918729026845652, "f1_stderr": 0.0011958323498480873, "acc": 0.3760981059411563, "acc_stderr": 0.010720714478256874 }, "harness|drop|3": { "em": 0.0008389261744966443, "em_stderr": 0.0002964962989801232, "f1": 0.04918729026845652, "f1_stderr": 0.0011958323498480873 }, "harness|gsm8k|5": { "acc": 0.09552691432903715, "acc_stderr": 0.008096605771155738 }, "harness|winogrande|5": { "acc": 0.6566692975532754, "acc_stderr": 0.013344823185358009 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
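Beyond the card template above, the per-example details can be pulled into pandas for a quick look. The following is a minimal sketch rather than a prescribed workflow: it assumes the `harness_winogrande_5` configuration and its `latest` split listed in the YAML config section, and a `datasets` release recent enough to provide `Dataset.to_pandas()`; the exact column names vary by harness task, so none are hard-coded here.

```python
from datasets import load_dataset

# Load the per-example details for the winogrande run; the "latest" split
# points to the most recent evaluation timestamp recorded for this config.
details = load_dataset(
    "open-llm-leaderboard/details_OpenAssistant__pythia-12b-sft-v8-2.5k-steps",
    "harness_winogrande_5",
    split="latest",
)

# Convert to pandas to inspect the logged fields and the number of rows.
df = details.to_pandas()
print(df.columns.tolist())
print(f"{len(df)} evaluated examples")
```

Requesting one of the timestamped splits instead of `latest` pins the same inspection to a specific run.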
[ -0.3099954426288605, -0.6855152249336243, 0.1829529106616974, 0.19516654312610626, -0.1361670047044754, 0.11113414168357849, -0.3320028483867645, -0.12022875994443893, 0.31832990050315857, 0.4777339696884155, -0.686545729637146, -0.8725783228874207, -0.6293806433677673, 0.124923475086689, -0.17941831052303314, 1.1184346675872803, -0.42489445209503174, -0.14579561352729797, 0.13478979468345642, -0.24232427775859833, -0.32621508836746216, -0.35244986414909363, -0.37798845767974854, -0.39253148436546326, 0.35292738676071167, 0.6136487722396851, 0.4582439064979553, 0.5800344347953796, 0.6784540414810181, 0.3740615248680115, -0.09866050630807877, 0.13571277260780334, -0.43689391016960144, -0.11168141663074493, 0.1199016124010086, -0.5448404550552368, -0.6779589653015137, 0.16744346916675568, 0.7169830203056335, 0.4435715973377228, -0.09666900336742401, 0.6372252702713013, 0.03923473134636879, 0.6170979142189026, -0.47854647040367126, 0.40606173872947693, -0.2512570917606354, -0.03398118540644646, -0.319280743598938, -0.16543304920196533, -0.09208876639604568, -0.42156466841697693, -0.08735530823469162, -0.5034313201904297, 0.16699285805225372, 0.17562337219715118, 1.0553805828094482, 0.06424540281295776, -0.15118005871772766, -0.17577756941318512, -0.3922363519668579, 0.7718251347541809, -0.7673458456993103, 0.029045449569821358, 0.6238875985145569, 0.14791756868362427, -0.28299257159233093, -0.6526821255683899, -0.31107184290885925, -0.11747517436742783, -0.2705090641975403, 0.13314786553382874, -0.09733812510967255, -0.10774549841880798, 0.4740088880062103, 0.7456503510475159, -0.7693760395050049, -0.07530484348535538, -0.604278028011322, -0.20815034210681915, 0.9031593203544617, 0.31041404604911804, 0.10984079539775848, -0.46298351883888245, -0.37476834654808044, -0.31081387400627136, -0.465503990650177, 0.21132828295230865, 0.543438196182251, 0.4682314693927765, -0.6139780879020691, 0.8481642007827759, -0.551059365272522, 0.3962085247039795, -0.10504193603992462, -0.07612743228673935, 0.8256381154060364, -0.5640758275985718, -0.21783006191253662, 0.02203623577952385, 1.097561001777649, 0.34114477038383484, 0.047313109040260315, 0.09351611137390137, -0.2467838078737259, -0.13331881165504456, 0.04765308275818825, -0.9003389477729797, -0.3360869586467743, 0.42400768399238586, -0.620231032371521, -0.39033761620521545, 0.39792314171791077, -0.9303121566772461, -0.12486263364553452, -0.2441369742155075, 0.35285016894340515, -0.2805642783641815, -0.47127124667167664, -0.1786973476409912, -0.0702231377363205, 0.2064293622970581, 0.18303796648979187, -0.5870081782341003, 0.3844794034957886, 0.5622572898864746, 1.1579653024673462, -0.0892777219414711, -0.4025522470474243, -0.44242510199546814, -0.32401472330093384, -0.18216536939144135, 0.3632996678352356, -0.09728381037712097, -0.25908809900283813, -0.20759226381778717, 0.370036780834198, -0.3203355371952057, -0.4933672249317169, 0.6075950264930725, -0.27358004450798035, 0.08004879951477051, -0.28030601143836975, -0.3318440914154053, -0.16746996343135834, 0.2260594218969345, -0.6635558605194092, 1.400342345237732, 0.45511260628700256, -0.8579975962638855, 0.07688324898481369, -0.8471888303756714, -0.1650426685810089, 0.06381688266992569, 0.12302182614803314, -0.5449691414833069, -0.06897234171628952, 0.1444847583770752, 0.4572513699531555, -0.3380497097969055, 0.014712275937199593, -0.17128749191761017, -0.45215359330177307, 0.14493250846862793, -0.17605502903461456, 1.0417689085006714, 0.2085479497909546, -0.4654614329338074, 0.2754729986190796, 
-0.9233760833740234, 0.13921888172626495, 0.3136509656906128, -0.5408158302307129, -0.1934613436460495, -0.2161046266555786, 0.1460254192352295, 0.17873190343379974, 0.42340317368507385, -0.6433770060539246, 0.3100440502166748, -0.2931450605392456, 0.39004138112068176, 1.0803519487380981, -0.02234678342938423, 0.2598717510700226, -0.4129534065723419, 0.5713820457458496, 0.09164196997880936, 0.2580288350582123, 0.07348761707544327, -0.5883207321166992, -0.6667295098304749, -0.2756539583206177, 0.16655774414539337, 0.6365389227867126, -0.4738351106643677, 0.7138370275497437, -0.3799208402633667, -0.7054645419120789, -0.7110776305198669, 0.01648636721074581, 0.47042936086654663, 0.4933308959007263, 0.4257381558418274, -0.33254727721214294, -0.6259456276893616, -0.8910411596298218, -0.019324060529470444, -0.2452976554632187, 0.05546608939766884, 0.40358835458755493, 1.205008625984192, -0.16808608174324036, 0.700761616230011, -0.7748839855194092, -0.23133917152881622, -0.20068825781345367, 0.044363703578710556, 0.7224219441413879, 0.554827868938446, 0.368443101644516, -0.6722943186759949, -0.373002290725708, 0.013166967779397964, -0.738200306892395, -0.1457591950893402, -0.07877320796251297, -0.36979085206985474, 0.2889212667942047, -0.045705921947956085, -0.6283458471298218, 0.6559385061264038, 0.46742475032806396, -0.6926692724227905, 0.7501260042190552, -0.01964825578033924, 0.46781888604164124, -1.194582223892212, 0.20237839221954346, 0.06628385186195374, 0.005591078195720911, -0.4144015312194824, -0.04970652610063553, 0.058180972933769226, 0.2843315005302429, -0.4131205976009369, 0.5892374515533447, -0.4629918336868286, -0.2198462039232254, 0.11540140211582184, 0.045486658811569214, -0.2706567347049713, 0.556641697883606, -0.24564971029758453, 0.8295000791549683, 0.5207632780075073, -0.3365044593811035, 0.4735804498195648, 0.5386286973953247, -0.5634938478469849, 0.1967926174402237, -0.4873151183128357, 0.0025479041505604982, 0.22371144592761993, 0.1932711899280548, -0.9478074908256531, -0.41632089018821716, 0.5537689924240112, -0.6533589363098145, 0.08334919810295105, -0.3385002613067627, -0.6156789064407349, -0.4965929687023163, -0.5404459834098816, 0.24656294286251068, 0.49329569935798645, -0.5422951579093933, 0.3099347949028015, 0.20799919962882996, 0.03599277883768082, -0.541841447353363, -0.7135124206542969, -0.30033615231513977, -0.29211491346359253, -0.7263526916503906, 0.3362603485584259, -0.12388178706169128, -0.27912312746047974, 0.0251086987555027, -0.16559457778930664, -0.024375848472118378, 0.12314903736114502, 0.4138984978199005, 0.5217987895011902, -0.21973370015621185, -0.3620225489139557, -0.2373526245355606, -0.17148412764072418, 0.1795099526643753, -0.0028075959999114275, 0.6310465335845947, -0.3141060471534729, -0.31084510684013367, -0.28670620918273926, 0.12802977859973907, 0.547600269317627, -0.2771166265010834, 0.9645651578903198, 0.6640109419822693, -0.28046953678131104, -0.003065988887101412, -0.2502945065498352, -0.0171514879912138, -0.47478312253952026, 0.38033509254455566, -0.35561370849609375, -0.7624175548553467, 0.8223081827163696, 0.18385246396064758, 0.17689149081707, 0.6805041432380676, 0.6261213421821594, 0.20823651552200317, 0.8019611239433289, 0.16051752865314484, -0.15180814266204834, 0.5748357772827148, -0.7368159890174866, 0.11599341779947281, -1.131430745124817, -0.3771236836910248, -0.49262797832489014, -0.39556851983070374, -0.8845759034156799, -0.3659210205078125, 0.25706374645233154, 0.2539731562137604, -0.5478424429893494, 0.5380445122718811, 
-0.6813889145851135, 0.18838389217853546, 0.7272295951843262, 0.3065283000469208, 0.0891762226819992, -0.0005348949343897402, -0.061549875885248184, 0.23211108148097992, -0.4122941195964813, -0.36734968423843384, 1.4318995475769043, 0.2994789183139801, 0.6325017213821411, 0.09180836379528046, 0.9053558707237244, 0.21303847432136536, 0.26473620533943176, -0.5892669558525085, 0.6220612525939941, 0.09750532358884811, -0.48473331332206726, -0.2122514843940735, -0.6827017664909363, -0.8415164947509766, 0.25214025378227234, 0.05732820928096771, -1.0560479164123535, 0.011535943485796452, -0.02686738781630993, -0.09227168560028076, 0.40267395973205566, -0.6123654842376709, 0.8944827318191528, -0.2248709350824356, -0.48449692130088806, 0.041707802563905716, -0.8369144201278687, 0.47135835886001587, 0.12698893249034882, 0.28411659598350525, -0.16305527091026306, 0.10726548731327057, 1.0882031917572021, -0.7655123472213745, 0.6075107455253601, -0.19402959942817688, 0.19626165926456451, 0.3463772237300873, -0.29019325971603394, 0.638545572757721, -0.03193815425038338, -0.1798008531332016, 0.39987561106681824, -0.14284482598304749, -0.2945170998573303, -0.24669693410396576, 0.9398818612098694, -0.9173754453659058, -0.2564816176891327, -0.614602267742157, -0.613041877746582, 0.255663126707077, 0.2640337347984314, 0.34262412786483765, 0.27151331305503845, 0.09977251291275024, 0.2721090614795685, 0.3401746451854706, -0.1525588482618332, 0.5079430341720581, 0.3712303936481476, -0.1796654611825943, -0.7546395659446716, 0.7785102725028992, 0.27673816680908203, 0.17951105535030365, 0.19737005233764648, 0.16451413929462433, -0.4741573631763458, -0.5059415102005005, -0.41820797324180603, 0.2627800405025482, -0.546345055103302, -0.3255454897880554, -0.3717891573905945, -0.249852254986763, -0.4644484519958496, -0.0007623098790645599, -0.3659766614437103, -0.4333752393722534, -0.37170836329460144, -0.30788519978523254, 0.7617960572242737, 0.5489517450332642, -0.32173392176628113, 0.3037364184856415, -0.7280195951461792, 0.27495431900024414, -0.29522451758384705, 0.4754467010498047, -0.06008614972233772, -0.5582402944564819, -0.3966415524482727, 0.12016653269529343, -0.49927112460136414, -0.9800018668174744, 0.5292501449584961, -0.1306314915418625, 0.6131704449653625, 0.09465008974075317, 0.1847265213727951, 0.7631203532218933, -0.1701582819223404, 1.0465253591537476, -0.008619707077741623, -0.7169262170791626, 0.8075779676437378, -0.36844387650489807, 0.13002178072929382, 0.49163398146629333, 0.25615110993385315, -0.43102338910102844, -0.20746617019176483, -0.8592888712882996, -1.256569504737854, 1.2226673364639282, 0.6539078950881958, -0.39463186264038086, 0.14367161691188812, 0.2739023268222809, -0.155487522482872, 0.18736456334590912, -0.738146185874939, -0.7158559560775757, -0.15328988432884216, -0.3316921889781952, 0.007324735634028912, 0.0977347195148468, -0.3491672873497009, -0.47892341017723083, 0.9778561592102051, -0.02946547605097294, 0.4916573762893677, 0.23182056844234467, -0.09179377555847168, -0.19452494382858276, 0.19948604702949524, 0.39882174134254456, 0.6727719306945801, -0.3978678286075592, -0.13904742896556854, 0.19978374242782593, -0.7407921552658081, 0.05803949013352394, 0.3563563823699951, -0.14016953110694885, -0.1405162364244461, 0.5682320594787598, 0.96082603931427, 0.07086533308029175, -0.3884429335594177, 0.49890071153640747, 0.07890275865793228, -0.3038444519042969, -0.44418224692344666, 0.0529010035097599, -0.07275350391864777, 0.39541956782341003, 0.40203386545181274, 
-0.045384783297777176, -0.052925679832696915, -0.43291622400283813, 0.23245227336883545, 0.2391136884689331, -0.06951495260000229, -0.32719141244888306, 0.5831559300422668, -0.07823450863361359, -0.4027174413204193, 0.8612497448921204, -0.06736703217029572, -0.5684611797332764, 1.0294816493988037, 0.30244532227516174, 0.9836478233337402, -0.18953761458396912, 0.12469927966594696, 0.6419975757598877, 0.39410459995269775, -0.23101481795310974, 0.579643726348877, 0.08451491594314575, -0.43339094519615173, -0.12106603384017944, -0.8944976329803467, -0.16761226952075958, 0.3244039714336395, -0.9988275170326233, 0.37382063269615173, -0.22111932933330536, -0.0776602104306221, -0.1842879354953766, 0.41765308380126953, -0.7423259615898132, 0.15038298070430756, -0.1634669452905655, 0.9297381639480591, -1.0697886943817139, 0.5673122406005859, 0.7805267572402954, -0.6456974744796753, -1.007240891456604, -0.1438615918159485, 0.09155701845884323, -0.6866658926010132, 0.3996994197368622, 0.2998523414134979, 0.3769865930080414, -0.1408817023038864, -0.5891091823577881, -1.016761064529419, 1.5780056715011597, 0.101978600025177, -0.5321967601776123, 0.10329434275627136, 0.11566757410764694, 0.35672545433044434, -0.24080531299114227, 0.6261377930641174, 0.7675259709358215, 0.7246351838111877, -0.009204455651342869, -0.9310847520828247, 0.38990235328674316, -0.42975395917892456, -0.14788565039634705, 0.44430646300315857, -0.8846638202667236, 1.123008370399475, -0.181202232837677, 0.07569579035043716, 0.13442036509513855, 0.42780041694641113, 0.5060871243476868, 0.32074955105781555, 0.3308797776699066, 0.6527137160301208, 0.6483542323112488, -0.4637226164340973, 0.9995218515396118, -0.26861143112182617, 0.864486575126648, 1.0405281782150269, 0.08166007697582245, 0.7744385004043579, 0.38087496161460876, -0.4967540502548218, 0.4666479527950287, 0.8658829927444458, -0.36211323738098145, 0.47111669182777405, 0.1992649883031845, -0.09618021547794342, 0.051585130393505096, 0.1491071581840515, -0.43544507026672363, 0.329754501581192, 0.11724729090929031, -0.6546691656112671, -0.17240241169929504, -0.30158403515815735, 0.09706752002239227, -0.3055723309516907, -0.3024497926235199, 0.5005401372909546, -0.04106813296675682, -0.5679430961608887, 0.5843291878700256, -0.015608089044690132, 0.5897331833839417, -0.7195966839790344, -0.1713227480649948, -0.1599651724100113, 0.347587913274765, -0.6201353073120117, -0.9825141429901123, 0.1937941163778305, 0.12269292026758194, -0.21351593732833862, -0.21232086420059204, 0.5757594108581543, -0.2511427700519562, -0.5285570621490479, 0.4100259840488434, 0.30236878991127014, 0.363150954246521, -0.013083178550004959, -0.7872875928878784, 0.1817355751991272, 0.28506791591644287, -0.8461867570877075, 0.4506547749042511, 0.2672552168369293, -0.09120829403400421, 0.49882447719573975, 0.7943646907806396, 0.14186690747737885, 0.15746566653251648, -0.1376924365758896, 1.1371290683746338, -0.7531495094299316, -0.3872852921485901, -0.8335707783699036, 0.9131393432617188, -0.1930660456418991, -0.737461268901825, 0.8372992873191833, 1.0025391578674316, 0.8557511568069458, 0.07070238888263702, 0.8054038882255554, -0.5627591013908386, 0.4357079565525055, -0.412114679813385, 0.8665606379508972, -0.6066482067108154, 0.25499871373176575, -0.28624749183654785, -0.8434543013572693, 0.08086401969194412, 0.779417097568512, -0.3066215515136719, 0.004481789655983448, 0.713666558265686, 0.9921427965164185, -0.054950013756752014, 0.219144806265831, -0.12567181885242462, 0.4446293115615845, 
0.32536283135414124, 0.6555295586585999, 0.6022351980209351, -0.7657181024551392, 0.46761974692344666, -0.6031619310379028, -0.507591962814331, -0.12175019085407257, -0.5652623176574707, -0.7961728572845459, -0.5428535342216492, -0.33173349499702454, -0.6141215562820435, -0.007695354055613279, 1.0995486974716187, 0.4683338403701782, -0.8662796020507812, -0.35006195306777954, -0.1844584345817566, 0.1836753934621811, -0.29525429010391235, -0.36914876103401184, 0.5933958888053894, 0.017809459939599037, -0.6922526955604553, 0.3045070767402649, -0.17041803896427155, -0.20567309856414795, -0.06342998892068863, -0.29040586948394775, -0.40942078828811646, -0.2920197546482086, 0.41148367524147034, 0.1478099822998047, -0.677909791469574, -0.3711453676223755, -0.12435981631278992, 0.053734831511974335, 0.27638158202171326, 0.41154563426971436, -0.5590906143188477, 0.03711123391985893, 0.7123517990112305, 0.15613889694213867, 0.6567911505699158, 0.10007679462432861, 0.24515172839164734, -0.6266379952430725, -0.06949999928474426, 0.03770119324326515, 0.464657187461853, 0.12464046478271484, -0.4722740948200226, 0.9684007167816162, 0.41173607110977173, -0.7321073412895203, -0.9651377201080322, -0.19460497796535492, -1.0752077102661133, 0.03354567661881447, 1.3741062879562378, -0.37350979447364807, -0.37343719601631165, 0.10990610718727112, -0.22533845901489258, 0.35777249932289124, -0.7551385760307312, 0.4587058424949646, 0.7648873329162598, -0.3462318778038025, -0.022228119894862175, -0.7450987100601196, 0.28299564123153687, -0.0021513032261282206, -0.9738943576812744, 0.1149083822965622, 0.4148165285587311, 0.47095662355422974, 0.2643159031867981, 0.6477335691452026, -0.08926688879728317, 0.011762131936848164, 0.018064893782138824, 0.08645115047693253, -0.29239776730537415, -0.23135824501514435, -0.25798359513282776, 0.15452562272548676, -0.36237627267837524, -0.5880230665206909 ]
open-llm-leaderboard/details_openchat__openchat_v2
open-llm-leaderboard
2023-10-18T23:34:12Z
200
0
[ "region:us" ]
null
2023-08-18T11:15:13Z
--- pretty_name: Evaluation run of openchat/openchat_v2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [openchat/openchat_v2](https://huggingface.co/openchat/openchat_v2) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_openchat__openchat_v2\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-18T23:33:59.473281](https://huggingface.co/datasets/open-llm-leaderboard/details_openchat__openchat_v2/blob/main/results_2023-10-18T23-33-59.473281.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0016778523489932886,\n\ \ \"em_stderr\": 0.0004191330178826953,\n \"f1\": 0.06369546979865812,\n\ \ \"f1_stderr\": 0.0013881754743750058,\n \"acc\": 0.4267044764366107,\n\ \ \"acc_stderr\": 0.009941310874908384\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.0004191330178826953,\n\ \ \"f1\": 0.06369546979865812,\n \"f1_stderr\": 0.0013881754743750058\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09097801364670205,\n \ \ \"acc_stderr\": 0.007921322844013628\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803141\n\ \ }\n}\n```" repo_url: https://huggingface.co/openchat/openchat_v2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|arc:challenge|25_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T16:15:43.375202.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_18T23_33_59.473281 path: - '**/details_harness|drop|3_2023-10-18T23-33-59.473281.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-18T23-33-59.473281.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_18T23_33_59.473281 path: - '**/details_harness|gsm8k|5_2023-10-18T23-33-59.473281.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-18T23-33-59.473281.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hellaswag|10_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:15:43.375202.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:15:43.375202.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:15:43.375202.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:15:43.375202.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T16_15_43.375202 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:15:43.375202.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:15:43.375202.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T16_15_43.375202 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T16:15:43.375202.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T16:15:43.375202.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_18T23_33_59.473281 path: - '**/details_harness|winogrande|5_2023-10-18T23-33-59.473281.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-18T23-33-59.473281.parquet' - config_name: results data_files: - split: 2023_07_24T16_15_43.375202 path: - results_2023-07-24T16:15:43.375202.parquet - split: 2023_10_18T23_33_59.473281 path: - results_2023-10-18T23-33-59.473281.parquet - split: latest path: - results_2023-10-18T23-33-59.473281.parquet --- # Dataset Card for Evaluation run of openchat/openchat_v2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/openchat/openchat_v2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [openchat/openchat_v2](https://huggingface.co/openchat/openchat_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_openchat__openchat_v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T23:33:59.473281](https://huggingface.co/datasets/open-llm-leaderboard/details_openchat__openchat_v2/blob/main/results_2023-10-18T23-33-59.473281.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0016778523489932886, "em_stderr": 0.0004191330178826953, "f1": 0.06369546979865812, "f1_stderr": 0.0013881754743750058, "acc": 0.4267044764366107, "acc_stderr": 0.009941310874908384 }, "harness|drop|3": { "em": 0.0016778523489932886, "em_stderr": 0.0004191330178826953, "f1": 0.06369546979865812, "f1_stderr": 0.0013881754743750058 }, "harness|gsm8k|5": { "acc": 0.09097801364670205, "acc_stderr": 0.007921322844013628 }, "harness|winogrande|5": { "acc": 0.7624309392265194, "acc_stderr": 0.011961298905803141 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
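The card above lists one configuration per harness task plus an aggregated "results" configuration whose "latest" split points at the newest run. Below is a minimal sketch of navigating that layout with the `datasets` library; only the repository id, the "results" config name, and the "latest" split name are taken from the card, while the printed fields and variable names are illustrative:

```python
# Minimal sketch: enumerate the configurations of the details dataset and
# pull the aggregated scores from the latest run. Requires network access
# to the Hugging Face Hub.
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_openchat__openchat_v2"

# Every harness task (harness_drop_3, harness_gsm8k_5, ...) is its own config.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations available")

# The "results" config aggregates all runs; "latest" always points at the newest one.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```

Swapping "results" for any of the harness_* config names shown above loads the per-example details for that task instead, exactly as in the card's own `load_dataset` example.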
[ -0.34528371691703796, -0.7453863024711609, 0.09413585066795349, 0.25963976979255676, -0.15682914853096008, 0.08291513472795486, -0.47406163811683655, -0.24490223824977875, 0.4016643464565277, 0.5277593731880188, -0.6528066992759705, -0.8955895900726318, -0.5716219544410706, 0.015298589132726192, -0.0779741182923317, 1.097633957862854, -0.25526776909828186, -0.24356503784656525, -0.014814192429184914, -0.3351804316043854, -0.43610358238220215, -0.3949289321899414, -0.557729184627533, -0.41120752692222595, 0.2913697361946106, 0.7079987525939941, 0.41254961490631104, 0.5369364023208618, 0.6472342610359192, 0.3745044469833374, -0.07767646014690399, 0.27597174048423767, -0.6409285068511963, -0.09529239684343338, 0.2239634096622467, -0.6620829701423645, -0.7517644166946411, 0.16223640739917755, 0.6441296339035034, 0.44116827845573425, -0.13635429739952087, 0.56073397397995, 0.22237098217010498, 0.6290411949157715, -0.5159649848937988, 0.5050832033157349, -0.40222784876823425, -0.0964699536561966, -0.3249494433403015, -0.15784497559070587, -0.10324307531118393, -0.3907395005226135, -0.20890073478221893, -0.5054667592048645, -0.024528561159968376, 0.1082775816321373, 1.074493408203125, 0.008250005543231964, -0.07969774305820465, -0.2605809271335602, -0.47830355167388916, 0.80607008934021, -0.8113073706626892, -0.007829375565052032, 0.7258031368255615, 0.20751376450061798, -0.3140956461429596, -0.6054427623748779, -0.3721076548099518, -0.05377655476331711, -0.2915356159210205, 0.17974750697612762, -0.08954132348299026, -0.14753684401512146, 0.30358779430389404, 0.6164031028747559, -0.6619013547897339, 0.10447031259536743, -0.6432556509971619, -0.20771533250808716, 0.879892110824585, 0.34856659173965454, 0.12988880276679993, -0.3118250072002411, -0.3339739441871643, -0.25181683897972107, -0.32292699813842773, 0.3083995282649994, 0.47576388716697693, 0.6002496480941772, -0.7347885370254517, 0.8998534679412842, -0.4967893958091736, 0.45622387528419495, -0.16266274452209473, -0.15541134774684906, 0.8740711212158203, -0.6917639374732971, -0.1663479506969452, -0.045519594103097916, 1.191860556602478, 0.5074285268783569, 0.16793613135814667, 0.28332412242889404, -0.16539451479911804, -0.09815797954797745, 0.04758331552147865, -0.7933188080787659, -0.12502866983413696, 0.4414530098438263, -0.6391754150390625, -0.40272507071495056, 0.27825674414634705, -0.879282534122467, -0.17786052823066711, -0.15355415642261505, 0.15295644104480743, -0.24019993841648102, -0.46663928031921387, -0.07397043704986572, -0.032260555773973465, 0.1874312311410904, 0.1755947321653366, -0.6227237582206726, 0.43286508321762085, 0.6162168979644775, 1.1601519584655762, -0.011502931825816631, -0.3322334289550781, -0.26719143986701965, -0.388980895280838, -0.2254273146390915, 0.4595527946949005, -0.22869451344013214, -0.33702346682548523, -0.09973515570163727, 0.30846020579338074, -0.3189569413661957, -0.5354440212249756, 0.6825914978981018, -0.23982955515384674, 0.23823724687099457, -0.2182387262582779, -0.3903051018714905, -0.009636370465159416, 0.2518203854560852, -0.6350979804992676, 1.426262617111206, 0.3897422254085541, -0.9246364831924438, 0.05051290616393089, -0.9463728070259094, -0.22624824941158295, 0.030529316514730453, 0.019496751949191093, -0.5039855241775513, -0.2593068480491638, 0.12778544425964355, 0.538969099521637, -0.3785941004753113, -0.13414523005485535, -0.3343345522880554, -0.4401792287826538, 0.12490987032651901, -0.009889064356684685, 1.0861976146697998, 0.18507564067840576, -0.3373924791812897, 
0.20467469096183777, -0.8492317795753479, 0.19153271615505219, 0.44229406118392944, -0.5427510142326355, -0.37347477674484253, -0.17324407398700714, 0.06053438410162926, 0.13751952350139618, 0.47885432839393616, -0.7325728535652161, 0.3009313642978668, -0.3171333968639374, 0.4545937478542328, 0.8914097547531128, -0.041065286844968796, 0.39115509390830994, -0.36701318621635437, 0.4049918055534363, 0.13288620114326477, 0.36238157749176025, -0.05277141556143761, -0.6924286484718323, -0.7331245541572571, -0.1786487102508545, 0.08158355951309204, 0.798445463180542, -0.5312792658805847, 0.8088786005973816, -0.43926194310188293, -0.7074214816093445, -0.7972555756568909, 0.0705060288310051, 0.4828468859195709, 0.4472905993461609, 0.30209580063819885, -0.39974966645240784, -0.6294159889221191, -0.9209190011024475, 0.0012688160641118884, -0.30912330746650696, 0.04016440361738205, 0.6461685299873352, 0.9561922550201416, -0.30922266840934753, 0.8260374665260315, -0.7303503155708313, -0.40148839354515076, -0.19811829924583435, 0.020278213545680046, 0.6387641429901123, 0.5463701486587524, 0.49880897998809814, -0.659210205078125, -0.42342692613601685, 0.08202570676803589, -0.7807638049125671, -0.17551620304584503, -0.09274495393037796, -0.2854696214199066, 0.4199266731739044, 0.046396318823099136, -0.6263046264648438, 0.5844975709915161, 0.5316348671913147, -0.573386549949646, 0.6372413039207458, -0.036682575941085815, 0.42055070400238037, -1.1551759243011475, 0.13438646495342255, 0.07387413084506989, 0.012148367241024971, -0.45593565702438354, -0.28540274500846863, 0.05631544440984726, 0.2769491672515869, -0.39648866653442383, 0.7717236280441284, -0.41810789704322815, -0.17661625146865845, 0.05904774367809296, 0.16926833987236023, -0.25891393423080444, 0.5631201267242432, -0.40956127643585205, 0.8282607197761536, 0.4934168756008148, -0.31951525807380676, 0.45703497529029846, 0.5234564542770386, -0.5601221919059753, 0.2758294641971588, -0.6136590242385864, 0.10553502291440964, 0.17262905836105347, 0.20377351343631744, -1.1108369827270508, -0.3306563198566437, 0.5951640009880066, -0.7083194851875305, 0.11610262095928192, -0.2877901792526245, -0.5931763648986816, -0.5769721269607544, -0.4450295865535736, 0.13274215161800385, 0.5421478748321533, -0.39499449729919434, 0.15672728419303894, 0.4766707718372345, -0.013960788026452065, -0.6237102746963501, -0.6889491081237793, -0.11098204553127289, -0.21116290986537933, -0.7766041159629822, 0.2707569897174835, -0.11597868800163269, -0.2787524163722992, 0.016801144927740097, -0.031064674258232117, -0.08642900735139847, 0.03792145475745201, 0.36204689741134644, 0.43245595693588257, -0.1523612141609192, -0.33965057134628296, -0.3014361262321472, -0.1704489290714264, 0.11611393839120865, 0.0663987323641777, 0.666735827922821, -0.286753386259079, -0.32553791999816895, -0.25160279870033264, 0.1493992805480957, 0.4770358204841614, -0.21383564174175262, 0.9148394465446472, 0.7492527961730957, -0.2266848385334015, 0.004044871777296066, -0.35298943519592285, -0.06781651824712753, -0.4976920485496521, 0.38218241930007935, -0.27294492721557617, -0.9575214982032776, 0.9151626229286194, 0.2823212444782257, 0.23416367173194885, 0.5558998584747314, 0.60340815782547, 0.11221543699502945, 0.805031418800354, 0.23115895688533783, -0.22268737852573395, 0.5877129435539246, -0.5800982117652893, 0.07726217806339264, -1.010773777961731, -0.4106210470199585, -0.518197774887085, -0.33964604139328003, -0.8638867139816284, -0.4448487460613251, 0.19073134660720825, 0.1528095304965973, 
-0.37720194458961487, 0.49121907353401184, -0.6016392707824707, 0.272179514169693, 0.7258972525596619, 0.26924583315849304, -0.039925649762153625, -0.05566711723804474, -0.055926162749528885, 0.23794840276241302, -0.4551895260810852, -0.48894163966178894, 1.307355523109436, 0.3685840964317322, 0.6856182217597961, -0.07481525093317032, 0.8446719646453857, 0.24240058660507202, 0.21329233050346375, -0.5585434436798096, 0.6992243528366089, 0.013263062573969364, -0.41059935092926025, -0.2754437029361725, -0.6541177034378052, -1.0113388299942017, 0.1640368551015854, 0.029661308974027634, -1.064224123954773, -0.06116830185055733, -0.035147275775671005, 0.06380262970924377, 0.32672595977783203, -0.6336960792541504, 0.8813961744308472, -0.09945023059844971, -0.3076804578304291, 0.01935439556837082, -0.9176834225654602, 0.5225988030433655, 0.11966175585985184, 0.368539959192276, -0.26635727286338806, 0.009047521278262138, 1.0589622259140015, -0.6539804935455322, 0.8696485161781311, -0.21458767354488373, 0.08102641999721527, 0.46078237891197205, -0.3156050145626068, 0.5850165486335754, -0.12503831088542938, -0.1557629555463791, 0.4652712643146515, -0.1972496509552002, -0.2865104377269745, -0.31827428936958313, 0.9072831273078918, -0.9188647270202637, -0.2184167057275772, -0.2442840337753296, -0.5316798090934753, 0.22453448176383972, 0.2799850106239319, 0.2910396456718445, 0.2673651874065399, -0.0618995726108551, 0.29746487736701965, 0.2804969847202301, -0.29205870628356934, 0.4286840260028839, 0.34890931844711304, -0.17487187683582306, -0.7317396998405457, 0.6471643447875977, 0.25190213322639465, 0.23792465031147003, 0.17366494238376617, 0.07438977062702179, -0.4674486517906189, -0.3333226144313812, -0.3169417083263397, 0.2793477177619934, -0.5759867429733276, -0.22474253177642822, -0.5041710138320923, -0.32695460319519043, -0.5301651358604431, 0.0859452411532402, -0.4883265793323517, -0.4297684133052826, -0.4505433142185211, -0.10012967139482498, 0.6773092150688171, 0.5450973510742188, -0.3193581998348236, 0.302567720413208, -0.8220519423484802, 0.23226352035999298, -0.2129196673631668, 0.5941554307937622, -0.1306595653295517, -0.45412126183509827, -0.4793413579463959, 0.3443540036678314, -0.3512459993362427, -0.8085804581642151, 0.4216478765010834, 0.04430028423666954, 0.6752455234527588, 0.12289062887430191, 0.2166636437177658, 0.6916570067405701, -0.23818539083003998, 1.0411442518234253, 0.04544423520565033, -0.6286570429801941, 0.7754380106925964, -0.4207674264907837, 0.12583667039871216, 0.43713048100471497, 0.1978847235441208, -0.547520637512207, -0.2905452251434326, -0.8508867025375366, -1.1347064971923828, 1.173604965209961, 0.685262143611908, -0.12366212904453278, 0.0752916932106018, 0.3251158595085144, -0.19460377097129822, 0.126398965716362, -0.5780152678489685, -0.7885034084320068, -0.11350784450769424, -0.2741760015487671, -0.09234242141246796, -0.10143399983644485, -0.31581056118011475, -0.4034840166568756, 0.7806938886642456, -0.04536980763077736, 0.6650885343551636, 0.11944830417633057, 0.054794326424598694, -0.14322324097156525, 0.2186979204416275, 0.6185710430145264, 0.638913631439209, -0.32812049984931946, -0.1737135797739029, 0.14313368499279022, -0.6733049750328064, -0.013723719865083694, 0.23981048166751862, 0.07381236553192139, -0.10578635334968567, 0.5197532176971436, 1.0446206331253052, 0.05628542974591255, -0.39924684166908264, 0.6202294230461121, -0.018892673775553703, -0.3185870945453644, -0.4306454062461853, 0.1321987360715866, 0.02142835408449173, 0.4039708077907562, 
0.38270413875579834, -0.14462727308273315, -0.13652727007865906, -0.33398160338401794, 0.23796933889389038, 0.30445945262908936, -0.1968882977962494, -0.3312058448791504, 0.5279182195663452, 0.01226312667131424, -0.4801888167858124, 0.8146238923072815, -0.11070480197668076, -0.578632652759552, 1.0712354183197021, 0.3042362630367279, 0.885522186756134, -0.19266007840633392, 0.11325077712535858, 0.5980276465415955, 0.4242594838142395, -0.12023070454597473, 0.6459136009216309, 0.17098647356033325, -0.6898688077926636, -0.2312983274459839, -0.6337680816650391, -0.26708170771598816, 0.33888933062553406, -0.951276957988739, 0.4374772906303406, -0.05749009922146797, -0.24499697983264923, -0.08879969269037247, 0.2539084851741791, -0.7805907726287842, -0.0014479038072749972, -0.0824216827750206, 0.8747887015342712, -0.9478263854980469, 0.6088883280754089, 0.8787493705749512, -0.4388589560985565, -0.8352078795433044, -0.3580809533596039, 0.10488978028297424, -0.885356605052948, 0.4321731925010681, 0.2892223298549652, 0.4405057728290558, -0.14290177822113037, -0.7200832962989807, -0.9627246856689453, 1.4183313846588135, 0.07194464653730392, -0.4762459099292755, 0.21188247203826904, 0.24851371347904205, 0.38313087821006775, -0.3544420897960663, 0.6737973690032959, 0.6598917245864868, 0.6381843686103821, 0.008589114993810654, -1.1832839250564575, 0.27986741065979004, -0.5594205260276794, -0.1109432578086853, 0.3969254493713379, -0.9208897948265076, 1.0622704029083252, -0.254061758518219, -0.0839708000421524, -0.03287821635603905, 0.4197084903717041, 0.39498579502105713, 0.3232246935367584, 0.4700128436088562, 0.5886617302894592, 0.6142355799674988, -0.3760230839252472, 0.9534428119659424, -0.2673594653606415, 0.8577156066894531, 1.1142306327819824, -0.007726518902927637, 0.7023233771324158, 0.286550372838974, -0.42728951573371887, 0.42608657479286194, 0.8329563736915588, -0.25317248702049255, 0.40542274713516235, 0.08690650761127472, 0.06099463254213333, 0.0580727644264698, 0.04272191599011421, -0.4057576656341553, 0.48641833662986755, 0.2564411759376526, -0.5445423126220703, -0.22104677557945251, -0.20816351473331451, 0.19262097775936127, -0.32527637481689453, -0.147583469748497, 0.701179563999176, -0.016338270157575607, -0.5315564274787903, 0.7474821209907532, -0.08354920148849487, 0.7036185264587402, -0.6633433103561401, -0.15270568430423737, -0.17594783008098602, 0.34589388966560364, -0.4617002606391907, -1.130638837814331, 0.23667292296886444, -0.011091282591223717, -0.12527284026145935, -0.18011072278022766, 0.7635994553565979, -0.314441055059433, -0.36569106578826904, 0.4566558301448822, 0.39623910188674927, 0.3862243592739105, 0.04900366812944412, -0.9071381092071533, 0.2603907585144043, 0.3073541224002838, -0.8307604789733887, 0.42128118872642517, 0.31941089034080505, 0.012631102465093136, 0.5838752388954163, 0.7238772511482239, 0.029486248269677162, 0.024357277899980545, -0.08655872195959091, 1.2071752548217773, -0.6637851595878601, -0.4608015716075897, -0.85929274559021, 0.9419505000114441, -0.306142121553421, -0.6489207148551941, 0.8206079006195068, 0.8980752825737, 0.8367146849632263, 0.1439341902732849, 0.8572250604629517, -0.5789465308189392, 0.5743564963340759, -0.3239585757255554, 0.8710687160491943, -0.6213789582252502, 0.23398782312870026, -0.2273920774459839, -0.7832151651382446, 0.03308890759944916, 0.6860399842262268, -0.1964837908744812, 0.016373712569475174, 0.5560977458953857, 1.0406469106674194, 0.01155431941151619, 0.23865829408168793, -0.08170832693576813, 
0.3856258988380432, 0.34470927715301514, 0.5701724886894226, 0.6555202603340149, -0.6738017797470093, 0.48653510212898254, -0.5805495381355286, -0.5578981637954712, -0.17222902178764343, -0.6271722316741943, -0.9139389395713806, -0.6896513104438782, -0.29530543088912964, -0.5924555063247681, -0.033984676003456116, 1.0091683864593506, 0.5249318480491638, -0.7297669649124146, -0.4108878970146179, 0.046912867575883865, 0.1681257039308548, -0.18352188169956207, -0.37391242384910583, 0.39896178245544434, -0.20787736773490906, -0.7082169651985168, 0.2650907635688782, -0.18408414721488953, -0.10089018195867538, -0.03370575234293938, -0.24606953561306, -0.3447103798389435, -0.2647033631801605, 0.5589138865470886, 0.22217760980129242, -0.6284305453300476, -0.35991689562797546, -0.09333286434412003, -0.1365123838186264, 0.3689192533493042, 0.36942803859710693, -0.4741120934486389, 0.11884966492652893, 0.6572449803352356, 0.14747461676597595, 0.6100738048553467, 0.08857264369726181, 0.24137817323207855, -0.765321671962738, 0.023057499900460243, 0.02089161053299904, 0.38841792941093445, 0.14521761238574982, -0.43541547656059265, 1.0089062452316284, 0.300427109003067, -0.7575273513793945, -0.9899170398712158, -0.24711336195468903, -1.1504894495010376, 0.02041487582027912, 1.5521281957626343, -0.3782655596733093, -0.29612475633621216, -0.029028363525867462, -0.2712375223636627, 0.3174852728843689, -0.766153872013092, 0.4336942136287689, 0.7021840810775757, -0.2730007767677307, -0.012481092475354671, -0.6844239234924316, 0.2686115503311157, -0.10102061182260513, -1.0344035625457764, 0.12298134714365005, 0.32916510105133057, 0.3466056287288666, 0.25064802169799805, 0.8227312564849854, -0.021717870607972145, -0.21558520197868347, -0.024331150576472282, 0.24160601198673248, -0.2133638560771942, -0.11594583839178085, -0.12467367947101593, 0.017752934247255325, -0.4165463149547577, -0.6468474268913269 ]
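The long float arrays attached to each record above appear to be embedding vectors for the card text. As a small, hedged sketch of how two such vectors could be compared, cosine similarity is the usual choice; the helper below is hypothetical and the toy vectors are made up, since real records carry much longer arrays:

```python
# Hedged sketch: cosine similarity between two card-embedding vectors,
# i.e. float arrays like the ones stored with each record above.
import math

def cosine_similarity(a, b):
    # Dot product of the two vectors divided by the product of their norms.
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = math.sqrt(sum(x * x for x in a))
    norm_b = math.sqrt(sum(x * x for x in b))
    return dot / (norm_a * norm_b)

# Toy vectors for illustration only; real embeddings have far more dimensions.
u = [-0.345, -0.745, 0.094, 0.259]
v = [-0.045, -0.052, -0.432, 0.232]
print(round(cosine_similarity(u, v), 4))
```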
open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4
open-llm-leaderboard
2023-10-22T06:51:28Z
200
0
[ "region:us" ]
null
2023-08-18T11:20:36Z
--- pretty_name: Evaluation run of jondurbin/airoboros-7b-gpt4 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [jondurbin/airoboros-7b-gpt4](https://huggingface.co/jondurbin/airoboros-7b-gpt4)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T06:51:15.368874](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4/blob/main/results_2023-10-22T06-51-15.368874.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.24276426174496643,\n\ \ \"em_stderr\": 0.004390839668047224,\n \"f1\": 0.3038569630872493,\n\ \ \"f1_stderr\": 0.004387376487144696,\n \"acc\": 0.37414887626834564,\n\ \ \"acc_stderr\": 0.008035199409633497\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.24276426174496643,\n \"em_stderr\": 0.004390839668047224,\n\ \ \"f1\": 0.3038569630872493,\n \"f1_stderr\": 0.004387376487144696\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.017437452615617893,\n \ \ \"acc_stderr\": 0.0036054868679982572\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7308602999210734,\n \"acc_stderr\": 0.012464911951268738\n\ \ }\n}\n```" repo_url: https://huggingface.co/jondurbin/airoboros-7b-gpt4 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|arc:challenge|25_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T14:10:25.763486.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T06_51_15.368874 path: - '**/details_harness|drop|3_2023-10-22T06-51-15.368874.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T06-51-15.368874.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T06_51_15.368874 path: - '**/details_harness|gsm8k|5_2023-10-22T06-51-15.368874.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T06-51-15.368874.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hellaswag|10_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:10:25.763486.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:10:25.763486.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:10:25.763486.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:10:25.763486.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T14_10_25.763486 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:10:25.763486.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:10:25.763486.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T14_10_25.763486 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T14:10:25.763486.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T14:10:25.763486.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T06_51_15.368874 path: - '**/details_harness|winogrande|5_2023-10-22T06-51-15.368874.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T06-51-15.368874.parquet' - config_name: results data_files: - split: 2023_07_31T14_10_25.763486 path: - results_2023-07-31T14:10:25.763486.parquet - split: 2023_10_22T06_51_15.368874 path: - results_2023-10-22T06-51-15.368874.parquet - split: latest path: - results_2023-10-22T06-51-15.368874.parquet ---

# Dataset Card for Evaluation run of jondurbin/airoboros-7b-gpt4

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/jondurbin/airoboros-7b-gpt4
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [jondurbin/airoboros-7b-gpt4](https://huggingface.co/jondurbin/airoboros-7b-gpt4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-22T06:51:15.368874](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4/blob/main/results_2023-10-22T06-51-15.368874.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.24276426174496643,
        "em_stderr": 0.004390839668047224,
        "f1": 0.3038569630872493,
        "f1_stderr": 0.004387376487144696,
        "acc": 0.37414887626834564,
        "acc_stderr": 0.008035199409633497
    },
    "harness|drop|3": {
        "em": 0.24276426174496643,
        "em_stderr": 0.004390839668047224,
        "f1": 0.3038569630872493,
        "f1_stderr": 0.004387376487144696
    },
    "harness|gsm8k|5": {
        "acc": 0.017437452615617893,
        "acc_stderr": 0.0036054868679982572
    },
    "harness|winogrande|5": {
        "acc": 0.7308602999210734,
        "acc_stderr": 0.012464911951268738
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
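As a small extension of the loading example above, here is a minimal sketch of how to read the aggregated "results" config and the latest per-task details. It only assumes that the `datasets` library is installed; the repository id, the config names, and the `latest` split are the ones declared in the YAML header of this card, and `REPO` is just a local convenience name introduced for the example.

```python
from datasets import load_dataset

# Local convenience name for the repository id shown in this card.
REPO = "open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4"

# Aggregated metrics of the most recent run: the "results" config, "latest" split.
results = load_dataset(REPO, "results", split="latest")

# Per-example details for a single task, e.g. the 5-shot Winogrande eval.
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")

print(results[0])        # first row of the aggregated results table
print(len(winogrande))   # number of evaluated examples in the latest run
```

Because the `latest` split of each config points at the files of the most recent timestamped run, re-running this snippet after a new evaluation should pick up the newer results without any code change.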
[ -0.4716660678386688, -0.6450860500335693, 0.14013443887233734, 0.19018305838108063, -0.17413358390331268, 0.08046858757734299, -0.3442706763744354, -0.18403661251068115, 0.3729672431945801, 0.5613752603530884, -0.6229516267776489, -0.9114127159118652, -0.729252278804779, 0.20417343080043793, -0.17752227187156677, 1.1788074970245361, -0.31857845187187195, -0.32758617401123047, 0.10444942116737366, -0.28675395250320435, -0.3466074764728546, -0.4456016421318054, -0.4929239749908447, -0.42295560240745544, 0.3685336112976074, 0.4799152910709381, 0.4053972363471985, 0.7742379307746887, 0.6277559399604797, 0.3899741470813751, -0.18697713315486908, 0.15936875343322754, -0.41206321120262146, -0.14387479424476624, 0.2578875720500946, -0.5674481987953186, -0.7236815690994263, 0.12264899164438248, 0.6763610243797302, 0.46965789794921875, -0.2954798936843872, 0.5583100914955139, 0.09293736517429352, 0.575534462928772, -0.4294397234916687, 0.3914779722690582, -0.37416207790374756, -0.06381825357675552, -0.37940359115600586, -0.1778925359249115, 0.020639875903725624, -0.31699836254119873, -0.1655239313840866, -0.5483776926994324, 0.23150001466274261, 0.14326788485050201, 1.1058367490768433, 0.13416293263435364, -0.2178768664598465, -0.26188012957572937, -0.24252595007419586, 0.8009819388389587, -0.9371206760406494, 0.08490860462188721, 0.6493993997573853, 0.11911089718341827, -0.236747145652771, -0.5377391576766968, -0.3504091501235962, -0.07909125089645386, -0.27103161811828613, 0.19567829370498657, -0.039542630314826965, -0.07380399107933044, 0.4679122865200043, 0.5715985298156738, -0.7915010452270508, -0.02306513674557209, -0.625732958316803, -0.15338106453418732, 0.889604389667511, 0.3038932979106903, 0.05221603065729141, -0.5614849328994751, -0.3513849377632141, -0.3416750431060791, -0.45140549540519714, 0.17487400770187378, 0.5790632367134094, 0.4422811269760132, -0.6249405741691589, 0.7367588877677917, -0.446556031703949, 0.6688451170921326, -0.10278083384037018, -0.266390860080719, 0.8661261796951294, -0.5606108903884888, -0.2983338534832001, 0.05747362971305847, 0.9900140762329102, 0.3953295946121216, -0.0015914634568616748, 0.22290535271167755, -0.28151318430900574, -0.10445112735033035, 0.12577573955059052, -0.7896264791488647, -0.21745659410953522, 0.3656665086746216, -0.5900177359580994, -0.41893666982650757, 0.3002801835536957, -0.8791435360908508, -0.10218675434589386, -0.2864241600036621, 0.198892280459404, -0.1767263114452362, -0.46489110589027405, -0.10009738057851791, -0.20409303903579712, 0.21899758279323578, 0.11656545102596283, -0.6478447318077087, 0.3540883958339691, 0.5691598057746887, 0.997455358505249, -0.0898314118385315, -0.40605947375297546, -0.27793025970458984, -0.12688978016376495, -0.22119656205177307, 0.5020841360092163, -0.19662265479564667, -0.4359528422355652, -0.20333288609981537, 0.2707703113555908, -0.328398734331131, -0.6350627541542053, 0.6193256378173828, -0.2712438404560089, 0.19707824289798737, -0.26039233803749084, -0.3942660093307495, -0.15096549689769745, 0.43767327070236206, -0.6652647256851196, 1.3879228830337524, 0.4234800934791565, -0.8916245102882385, 0.09230125695466995, -0.8264844417572021, -0.20448924601078033, 0.05913838744163513, -0.01576380617916584, -0.5733645558357239, -0.16184112429618835, 0.19940242171287537, 0.560676097869873, -0.33110305666923523, 0.13584889471530914, -0.285845011472702, -0.40580126643180847, 0.13576340675354004, -0.15648111701011658, 1.079759955406189, 0.22435913980007172, -0.47213342785835266, 0.07580996304750443, 
-0.9274522662162781, 0.12752525508403778, 0.26290833950042725, -0.5733878016471863, -0.16975653171539307, -0.35077139735221863, 0.083159901201725, 0.10919978469610214, 0.46011409163475037, -0.47726789116859436, 0.39287739992141724, -0.24434970319271088, 0.2913413941860199, 0.9998605847358704, 0.03135501593351364, 0.1264266073703766, -0.45710206031799316, 0.6309885382652283, 0.026028314605355263, 0.3125745356082916, 0.1306983232498169, -0.5428975820541382, -0.6907811164855957, -0.2155606895685196, 0.12045156955718994, 0.640993595123291, -0.6113902926445007, 0.7347291707992554, -0.37092825770378113, -0.7060914635658264, -0.6794359087944031, 0.18499304354190826, 0.48813673853874207, 0.5417365431785583, 0.3841904401779175, -0.22755946218967438, -0.6850480437278748, -0.9727004170417786, -0.016215309500694275, -0.31240567564964294, 0.08957838267087936, 0.48387598991394043, 0.9724122285842896, -0.3511403799057007, 0.5787523984909058, -0.7717491388320923, -0.2812683880329132, -0.3002188503742218, 0.11669022589921951, 0.8247634172439575, 0.5035343766212463, 0.519864022731781, -0.6877709627151489, -0.34625279903411865, 0.0321020744740963, -0.781774640083313, -0.3267873227596283, -0.08751479536294937, -0.25634053349494934, 0.36130788922309875, -0.03116985224187374, -0.5200252532958984, 0.5447156429290771, 0.610116720199585, -0.5866381525993347, 0.6687771677970886, -0.05042659863829613, 0.4782983362674713, -1.123506784439087, 0.22079133987426758, 0.0479380339384079, 0.052574522793293, -0.40548673272132874, -0.1475236862897873, -0.0016300645656883717, 0.3539264500141144, -0.4314344823360443, 0.6674784421920776, -0.4385993778705597, -0.18161286413669586, 0.015866374596953392, 0.07795584201812744, -0.047653112560510635, 0.5510215759277344, -0.3323575556278229, 0.7477405667304993, 0.5187672972679138, -0.3422439396381378, 0.48861971497535706, 0.4685252606868744, -0.45248857140541077, 0.3339260220527649, -0.536403477191925, -0.06568098068237305, 0.15847982466220856, 0.19190487265586853, -0.8830976486206055, -0.426632285118103, 0.44504380226135254, -0.598606288433075, 0.24965019524097443, -0.4168068468570709, -0.5540480613708496, -0.533614993095398, -0.46696725487709045, 0.21476633846759796, 0.4828847050666809, -0.49131113290786743, 0.2825426757335663, 0.39074432849884033, -0.04016276076436043, -0.6405179500579834, -0.7078503966331482, -0.13653385639190674, -0.3705330491065979, -0.6007258892059326, 0.31727084517478943, -0.16885103285312653, -0.2304765284061432, 0.05572723224759102, -0.14777851104736328, -0.09872733801603317, 0.17269933223724365, 0.3855224847793579, 0.5913364887237549, -0.05973193421959877, -0.4091157615184784, -0.16650986671447754, -0.1529933512210846, 0.1273917257785797, 0.046161748468875885, 0.5553967952728271, -0.3246625065803528, -0.2599629759788513, -0.21422003209590912, 0.10901094973087311, 0.462236225605011, -0.09700009226799011, 0.7976726293563843, 0.7227136492729187, -0.28279632329940796, 0.012746844440698624, -0.464561402797699, 0.08919951319694519, -0.49986910820007324, 0.296204149723053, -0.34557226300239563, -0.7933603525161743, 0.8229333162307739, 0.25671932101249695, 0.07294332981109619, 0.7487203478813171, 0.6104474663734436, 0.08644655346870422, 0.797730028629303, 0.12858285009860992, -0.1318146139383316, 0.49995794892311096, -0.8363854885101318, -0.08553003519773483, -1.1679811477661133, -0.4476745128631592, -0.507026195526123, -0.33975231647491455, -0.8161998987197876, -0.3048810064792633, 0.3072279393672943, 0.23801358044147491, -0.44093579053878784, 0.5335150361061096, 
-0.709443211555481, 0.24508072435855865, 0.6454266309738159, 0.1319272518157959, -0.015529136173427105, -0.10128428041934967, -0.11053210496902466, 0.25178852677345276, -0.5320805907249451, -0.34901535511016846, 1.3466756343841553, 0.18745237588882446, 0.6541479825973511, 0.03185069188475609, 0.9958207011222839, 0.24663197994232178, 0.4022045135498047, -0.41636836528778076, 0.5661723017692566, -0.006382807623594999, -0.6113055348396301, -0.1290307492017746, -0.7239412069320679, -0.9813353419303894, 0.2647069990634918, 0.05111085996031761, -0.901153564453125, 0.024495432153344154, 0.041761379688978195, -0.004253258462995291, 0.32950589060783386, -0.5917535424232483, 0.8423095345497131, -0.31043994426727295, -0.3293256163597107, 0.1397937685251236, -0.859130322933197, 0.4195314347743988, 0.09732174128293991, 0.38934940099716187, -0.2697570025920868, 0.04364967346191406, 1.1864347457885742, -0.5640318393707275, 0.6752796769142151, -0.31007736921310425, 0.1353556513786316, 0.39491796493530273, -0.34536421298980713, 0.532844603061676, -0.015920568257570267, -0.22974321246147156, 0.48983830213546753, -0.09630494564771652, -0.2862115800380707, -0.3283398449420929, 0.9968551993370056, -0.9561892747879028, -0.3334723711013794, -0.5267024040222168, -0.5081943273544312, 0.26457661390304565, 0.3359452188014984, 0.32460471987724304, 0.4156467318534851, 0.14827574789524078, 0.243867889046669, 0.2505016326904297, -0.1801525205373764, 0.48288553953170776, 0.4183695912361145, -0.17648343741893768, -0.8904398679733276, 0.7263096570968628, 0.31855010986328125, 0.0871352106332779, 0.18236589431762695, 0.08036307245492935, -0.5673871040344238, -0.5190891027450562, -0.45094770193099976, 0.33228418231010437, -0.6268851161003113, -0.4233032166957855, -0.43505749106407166, -0.2169378399848938, -0.5027126669883728, -0.08052633702754974, -0.32865750789642334, -0.3953079879283905, -0.44382116198539734, -0.31429919600486755, 0.601784884929657, 0.6351228952407837, -0.3748466372489929, 0.3229345977306366, -0.774467408657074, 0.31960099935531616, -0.18272249400615692, 0.44654330611228943, -0.1434108465909958, -0.5546177625656128, -0.39485520124435425, 0.15273775160312653, -0.3939278721809387, -0.8841444253921509, 0.640019953250885, -0.04007868096232414, 0.7684243321418762, 0.10567644983530045, 0.0774126648902893, 0.884335994720459, -0.19911976158618927, 1.0335378646850586, -0.00671753752976656, -0.7729843258857727, 0.7909757494926453, -0.3494141697883606, 0.13463130593299866, 0.5431551933288574, 0.20855526626110077, -0.4993950128555298, -0.2877736985683441, -1.0242522954940796, -1.193124532699585, 1.0532373189926147, 0.579682469367981, -0.3940328359603882, 0.0741862878203392, 0.3719663619995117, -0.07022633403539658, 0.28358206152915955, -0.6331683993339539, -0.7028976678848267, -0.2614709138870239, -0.3310336172580719, -0.07383747398853302, 0.04676278308033943, -0.4340856671333313, -0.440096914768219, 0.9492852091789246, -0.03574921190738678, 0.51670902967453, 0.20148493349552155, -0.0165815781801939, -0.0668894425034523, 0.22939971089363098, 0.49667277932167053, 0.7312570214271545, -0.43771833181381226, -0.014034954831004143, 0.23536896705627441, -0.5651999711990356, 0.08253827691078186, 0.39561161398887634, 0.017399078235030174, -0.09483276307582855, 0.6785765290260315, 0.9932645559310913, 0.03587872162461281, -0.3659248948097229, 0.5050487518310547, 0.06290943175554276, -0.33630192279815674, -0.44588300585746765, 0.2102702409029007, -0.07094882428646088, 0.3431190252304077, 0.4267202615737915, -0.06812483817338943, 
0.026674745604395866, -0.2656075954437256, 0.2730559706687927, 0.2273685187101364, -0.038824986666440964, -0.2869027853012085, 0.6232186555862427, -0.04133134335279465, -0.2695317566394806, 0.7538590431213379, -0.1497785598039627, -0.6733845472335815, 1.1235120296478271, 0.3751067817211151, 0.8439738750457764, -0.13352565467357635, 0.09280441701412201, 0.5421394109725952, 0.38586023449897766, -0.18834726512432098, 0.6088423728942871, 0.11650809645652771, -0.571506679058075, -0.24803459644317627, -0.792427122592926, -0.22536543011665344, 0.3247421979904175, -1.0793181657791138, 0.30274608731269836, -0.11837369203567505, -0.20101621747016907, -0.1880766749382019, 0.46754539012908936, -0.8950979709625244, 0.20027105510234833, -0.016272854059934616, 0.8724048137664795, -1.0637060403823853, 0.6670897603034973, 0.8102741241455078, -0.4295535087585449, -0.9560760259628296, -0.3047334551811218, 0.09646059572696686, -0.843978762626648, 0.4877752959728241, 0.33641767501831055, 0.4039047360420227, -0.10049276798963547, -0.6002597808837891, -1.0414447784423828, 1.5677000284194946, 0.1442178636789322, -0.45439785718917847, 0.18291550874710083, 0.08889784663915634, 0.37358251214027405, -0.23181816935539246, 0.6051151752471924, 0.7287417054176331, 0.7873400449752808, -0.03101934678852558, -0.9441590905189514, 0.28763625025749207, -0.530662477016449, -0.011990517377853394, 0.41011202335357666, -0.8560494780540466, 1.004284381866455, -0.07111777365207672, 0.06476765125989914, -0.09279167652130127, 0.3325481414794922, 0.6011892557144165, 0.2763824462890625, 0.4543795585632324, 0.7853493094444275, 0.6891388297080994, -0.40871119499206543, 1.0862685441970825, -0.2857375144958496, 0.8890560865402222, 0.9816797971725464, 0.0031219650991261005, 0.6575205326080322, 0.32535141706466675, -0.4604780673980713, 0.49032536149024963, 0.8036196827888489, -0.4099161624908447, 0.483608603477478, 0.1746271848678589, -0.007690573576837778, 0.046828389167785645, 0.0845746323466301, -0.5414052605628967, 0.3795962333679199, 0.24901652336120605, -0.48318037390708923, -0.14980538189411163, -0.24038004875183105, 0.14454129338264465, -0.4069279134273529, -0.21357332170009613, 0.6182839274406433, -0.08025195449590683, -0.5871800184249878, 0.828070878982544, -0.06462205201387405, 0.6588783264160156, -0.7100731730461121, -0.16082346439361572, -0.2087453007698059, 0.21970395743846893, -0.5266828536987305, -0.9945866465568542, 0.24759529531002045, 0.10755279660224915, -0.13975657522678375, -0.2539883553981781, 0.5997510552406311, -0.3332469165325165, -0.579782247543335, 0.40542709827423096, 0.3517167866230011, 0.33439338207244873, 0.13867637515068054, -0.9422838687896729, 0.3581058382987976, 0.3211826682090759, -0.8202743530273438, 0.3631608486175537, 0.29467257857322693, 0.14635461568832397, 0.545238733291626, 0.7880473732948303, 0.25893834233283997, 0.14476129412651062, -0.0891207903623581, 1.1061813831329346, -0.7989287972450256, -0.37850385904312134, -0.8129333853721619, 0.8180826306343079, -0.26470494270324707, -0.6896488070487976, 0.7983567714691162, 0.9385606646537781, 0.8986261487007141, 0.1515108048915863, 0.9052772521972656, -0.5234903693199158, 0.44933566451072693, -0.4581565856933594, 0.7683448195457458, -0.6517964005470276, 0.3372795581817627, -0.18787996470928192, -0.8069261908531189, -0.10136644542217255, 0.763575553894043, -0.1724817454814911, 0.021687453612685204, 0.5812262296676636, 0.9296708106994629, 0.05911383777856827, 0.08418799936771393, -0.08062641322612762, 0.46398046612739563, 0.45385226607322693, 
0.5969511866569519, 0.5871148109436035, -0.6645390391349792, 0.4629453122615814, -0.6541523337364197, -0.4480110704898834, -0.2111796736717224, -0.7100847363471985, -0.8210522532463074, -0.5056003332138062, -0.31686919927597046, -0.5286157131195068, 0.030966805294156075, 1.0361870527267456, 0.4719856381416321, -0.8121244311332703, -0.4218684434890747, -0.021480215713381767, 0.18663127720355988, -0.2479935884475708, -0.3650828003883362, 0.5750647187232971, -0.018554292619228363, -0.7029358148574829, 0.31698915362358093, -0.12918007373809814, -0.049788035452365875, -0.005477211903780699, -0.2517739534378052, -0.3459744155406952, -0.3461580276489258, 0.3337872326374054, 0.1359928548336029, -0.7276573181152344, -0.35332193970680237, -0.14986726641654968, 0.011290312744677067, 0.2660267949104309, 0.3036140501499176, -0.6011576056480408, 0.09867773205041885, 0.5774107575416565, 0.2632659673690796, 0.7004151344299316, 0.0981621965765953, 0.1805146038532257, -0.8084549903869629, -0.015515878796577454, 0.026301782578229904, 0.5264914631843567, 0.24652422964572906, -0.45054155588150024, 1.006746530532837, 0.4015321135520935, -0.7828308939933777, -0.9521570801734924, -0.2586202323436737, -1.1912500858306885, -0.028700977563858032, 1.4280294179916382, -0.2985058128833771, -0.3070763945579529, 0.07945644855499268, -0.2079354226589203, 0.3578968942165375, -0.6933128833770752, 0.5053811073303223, 0.7405189275741577, -0.41756877303123474, 0.018845465034246445, -0.6815556287765503, 0.27363157272338867, -0.04578765109181404, -1.0231441259384155, 0.014189022593200207, 0.3360336422920227, 0.43439292907714844, 0.2181525081396103, 0.664252519607544, 0.00802962388843298, -0.1935579478740692, -0.013304552063345909, 0.17038749158382416, -0.32053738832473755, -0.16104376316070557, -0.2155214548110962, 0.1119624525308609, -0.38543009757995605, -0.4355314075946808 ]
open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-1.3
open-llm-leaderboard
2023-10-19T00:22:36Z
200
0
[ "region:us" ]
null
2023-08-18T11:21:30Z
--- pretty_name: Evaluation run of jondurbin/airoboros-65b-gpt4-1.3 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [jondurbin/airoboros-65b-gpt4-1.3](https://huggingface.co/jondurbin/airoboros-65b-gpt4-1.3)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-1.3\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T00:22:24.283273](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-1.3/blob/main/results_2023-10-19T00-22-24.283273.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.40635486577181207,\n\ \ \"em_stderr\": 0.00502985933530148,\n \"f1\": 0.49071728187919794,\n\ \ \"f1_stderr\": 0.0047528105237378505,\n \"acc\": 0.4679967304402357,\n\ \ \"acc_stderr\": 0.010353850140010314\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.40635486577181207,\n \"em_stderr\": 0.00502985933530148,\n\ \ \"f1\": 0.49071728187919794,\n \"f1_stderr\": 0.0047528105237378505\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.13646702047005307,\n \ \ \"acc_stderr\": 0.00945574199881554\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7995264404104183,\n \"acc_stderr\": 0.011251958281205085\n\ \ }\n}\n```" repo_url: https://huggingface.co/jondurbin/airoboros-65b-gpt4-1.3 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|arc:challenge|25_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-09T14:21:18.857678.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T00_22_24.283273 path: - '**/details_harness|drop|3_2023-10-19T00-22-24.283273.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T00-22-24.283273.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T00_22_24.283273 path: - '**/details_harness|gsm8k|5_2023-10-19T00-22-24.283273.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T00-22-24.283273.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hellaswag|10_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T14:21:18.857678.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T14:21:18.857678.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-09T14:21:18.857678.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T14:21:18.857678.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_09T14_21_18.857678 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-management|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-09T14:21:18.857678.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-09T14:21:18.857678.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_09T14_21_18.857678 path: - '**/details_harness|truthfulqa:mc|0_2023-08-09T14:21:18.857678.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-09T14:21:18.857678.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T00_22_24.283273 path: - '**/details_harness|winogrande|5_2023-10-19T00-22-24.283273.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T00-22-24.283273.parquet' - config_name: results data_files: - split: 2023_08_09T14_21_18.857678 path: - results_2023-08-09T14:21:18.857678.parquet - split: 2023_10_19T00_22_24.283273 path: - results_2023-10-19T00-22-24.283273.parquet - split: latest path: - results_2023-10-19T00-22-24.283273.parquet --- # Dataset Card for Evaluation run of jondurbin/airoboros-65b-gpt4-1.3 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-65b-gpt4-1.3 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-65b-gpt4-1.3](https://huggingface.co/jondurbin/airoboros-65b-gpt4-1.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-1.3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T00:22:24.283273](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-1.3/blob/main/results_2023-10-19T00-22-24.283273.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.40635486577181207, "em_stderr": 0.00502985933530148, "f1": 0.49071728187919794, "f1_stderr": 0.0047528105237378505, "acc": 0.4679967304402357, "acc_stderr": 0.010353850140010314 }, "harness|drop|3": { "em": 0.40635486577181207, "em_stderr": 0.00502985933530148, "f1": 0.49071728187919794, "f1_stderr": 0.0047528105237378505 }, "harness|gsm8k|5": { "acc": 0.13646702047005307, "acc_stderr": 0.00945574199881554 }, "harness|winogrande|5": { "acc": 0.7995264404104183, "acc_stderr": 0.011251958281205085 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
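In addition to the per-task loading example above, the aggregated metrics quoted under "Latest results" can be pulled directly from the `results` configuration listed in this card's YAML. The snippet below is a minimal sketch, assuming only that the `datasets` library is installed and the Hugging Face Hub is reachable; the repository id, config name and split name all come from the card itself.

```python
from datasets import load_dataset

# Minimal usage sketch (assumes the `datasets` library and Hub access).
# The "results" config aggregates the run-level metrics; its "latest"
# split points at the most recent results file for this model.
results = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-1.3",
    "results",
    split="latest",
)

# Inspect the first row of aggregated scores
# (cf. the drop/gsm8k/winogrande metrics quoted above).
print(results[0])
```

The per-task configurations listed in the YAML above (for example `harness_winogrande_5`) can be loaded the same way by swapping in the config name.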
[ -0.46321672201156616, -0.657116174697876, 0.1442183256149292, 0.19013215601444244, -0.1781131625175476, 0.06490694731473923, -0.34545469284057617, -0.20295432209968567, 0.3760596811771393, 0.5290741324424744, -0.6437811255455017, -0.9171037077903748, -0.6984078288078308, 0.19719287753105164, -0.1496511548757553, 1.2330046892166138, -0.3458683490753174, -0.30184102058410645, 0.0778426080942154, -0.28058895468711853, -0.31796199083328247, -0.45027366280555725, -0.5149338841438293, -0.40955665707588196, 0.36997443437576294, 0.4802766740322113, 0.3970947265625, 0.744300127029419, 0.6446929574012756, 0.3840245008468628, -0.1493237316608429, 0.14354026317596436, -0.4009440243244171, -0.1582603007555008, 0.27723750472068787, -0.5680687427520752, -0.7852856516838074, 0.1580296903848648, 0.6820289492607117, 0.4406593143939972, -0.2933858335018158, 0.5496966242790222, 0.09417842328548431, 0.5876805782318115, -0.41175317764282227, 0.40366384387016296, -0.3612009286880493, -0.0354309156537056, -0.39143046736717224, -0.20529720187187195, 0.03679979592561722, -0.34302255511283875, -0.13163933157920837, -0.549892783164978, 0.24396923184394836, 0.12160710990428925, 1.1185202598571777, 0.13730601966381073, -0.17629988491535187, -0.2141328752040863, -0.24304616451263428, 0.7910152077674866, -0.9324689507484436, 0.05076255649328232, 0.6548370122909546, 0.0804729238152504, -0.1950138956308365, -0.5510208606719971, -0.34669095277786255, -0.06318164616823196, -0.2798216640949249, 0.20035585761070251, -0.06588993221521378, -0.07711215317249298, 0.4410627782344818, 0.5965897440910339, -0.7874895930290222, 0.016188915818929672, -0.606748640537262, -0.16582074761390686, 0.8910648822784424, 0.3163996636867523, 0.049010224640369415, -0.5471422672271729, -0.3830869495868683, -0.3402547240257263, -0.45161789655685425, 0.1767714023590088, 0.5680464506149292, 0.45114755630493164, -0.6363234519958496, 0.7122955322265625, -0.4317743182182312, 0.621476948261261, -0.12140434980392456, -0.26100796461105347, 0.8834890127182007, -0.5893113613128662, -0.2842423617839813, 0.02100501023232937, 1.0271413326263428, 0.40898406505584717, 0.01070195622742176, 0.23107153177261353, -0.282296359539032, -0.12096219509840012, 0.08462746441364288, -0.7950575947761536, -0.2248787134885788, 0.35784584283828735, -0.5582892894744873, -0.42515942454338074, 0.298797070980072, -0.8825483322143555, -0.07034721970558167, -0.2622116208076477, 0.20302033424377441, -0.1945166140794754, -0.4676009714603424, -0.10243722051382065, -0.1828153431415558, 0.24050042033195496, 0.125896155834198, -0.6385625600814819, 0.379905641078949, 0.5799745321273804, 1.0136784315109253, -0.11090414971113205, -0.41204380989074707, -0.2917206287384033, -0.1564091593027115, -0.1944471001625061, 0.4937739968299866, -0.1653379201889038, -0.42488715052604675, -0.21200944483280182, 0.2584449052810669, -0.3148372173309326, -0.6033462285995483, 0.6383547186851501, -0.26490479707717896, 0.23199456930160522, -0.24266403913497925, -0.4056529402732849, -0.1343175172805786, 0.42375946044921875, -0.6672102212905884, 1.3844832181930542, 0.4295816421508789, -0.8844407200813293, 0.10531000792980194, -0.8450703620910645, -0.20592382550239563, 0.06721513718366623, -0.013329070061445236, -0.5936034917831421, -0.20194023847579956, 0.2047017216682434, 0.5701752305030823, -0.32554489374160767, 0.11571792513132095, -0.27128681540489197, -0.3962964415550232, 0.11705198138952255, -0.1574861705303192, 1.0610607862472534, 0.22636468708515167, -0.4588140547275543, 0.08658333867788315, 
-0.8899462223052979, 0.1483796238899231, 0.2737785875797272, -0.5706688165664673, -0.15539860725402832, -0.3257797062397003, 0.08921974897384644, 0.10170534253120422, 0.47716888785362244, -0.4801318645477295, 0.3883265554904938, -0.2368522435426712, 0.287030965089798, 1.0087815523147583, 0.040608011186122894, 0.139949232339859, -0.4640794098377228, 0.658980131149292, 0.0187973789870739, 0.30306270718574524, 0.14016631245613098, -0.5441365838050842, -0.6746669411659241, -0.23742429912090302, 0.13871701061725616, 0.6418484449386597, -0.5803287625312805, 0.7164226174354553, -0.3970070779323578, -0.7008434534072876, -0.6586700081825256, 0.17870020866394043, 0.4876447021961212, 0.5140294432640076, 0.39384493231773376, -0.22199970483779907, -0.6887933611869812, -0.9770841598510742, -0.0102050406858325, -0.33111435174942017, 0.07251512259244919, 0.5095870494842529, 0.9857937097549438, -0.3237307369709015, 0.5970674753189087, -0.7951413989067078, -0.2897600829601288, -0.29112303256988525, 0.12022705376148224, 0.803479790687561, 0.5172061324119568, 0.5045613050460815, -0.6966531872749329, -0.3620542585849762, 0.014261610805988312, -0.7785273194313049, -0.3085288107395172, -0.07824814319610596, -0.2452104687690735, 0.371390700340271, -0.03329398110508919, -0.5499314069747925, 0.5219852924346924, 0.5995011329650879, -0.5842675566673279, 0.6693199872970581, -0.0334293507039547, 0.4480232298374176, -1.1111782789230347, 0.2091100960969925, 0.0948735699057579, 0.04138386249542236, -0.40650177001953125, -0.15849770605564117, -0.02039620839059353, 0.37116119265556335, -0.42182913422584534, 0.6610530614852905, -0.41535642743110657, -0.16200441122055054, 0.058965861797332764, 0.10879989713430405, -0.043734125792980194, 0.5619581341743469, -0.32861700654029846, 0.7556608319282532, 0.5072776079177856, -0.34947144985198975, 0.467742383480072, 0.46678444743156433, -0.4621221125125885, 0.3144869804382324, -0.5429961085319519, -0.04520457237958908, 0.17434246838092804, 0.2122483253479004, -0.8651428818702698, -0.4175299108028412, 0.4632803201675415, -0.5830163955688477, 0.2632487118244171, -0.3666742146015167, -0.5556973814964294, -0.530910313129425, -0.5012823939323425, 0.2261693924665451, 0.4742942452430725, -0.4695482552051544, 0.26212742924690247, 0.3995985686779022, -0.06226257607340813, -0.619080126285553, -0.7082965970039368, -0.1681232452392578, -0.3749728202819824, -0.591179609298706, 0.33419069647789, -0.1812209188938141, -0.24211958050727844, 0.05605123192071915, -0.1478099226951599, -0.10744914412498474, 0.16061362624168396, 0.35244786739349365, 0.6046724915504456, -0.07464759051799774, -0.37200525403022766, -0.19111239910125732, -0.13992968201637268, 0.13811786472797394, 0.07542011886835098, 0.524653971195221, -0.3478032946586609, -0.23578885197639465, -0.2343410849571228, 0.09460286051034927, 0.4571880102157593, -0.10607588291168213, 0.7733566164970398, 0.7120311260223389, -0.26194295287132263, 0.0157855786383152, -0.43914365768432617, 0.0842527523636818, -0.49870437383651733, 0.3122237026691437, -0.34092414379119873, -0.7981330752372742, 0.7943686842918396, 0.2656736373901367, 0.06686197966337204, 0.7570073008537292, 0.5823098421096802, 0.0701184868812561, 0.7881017923355103, 0.14098405838012695, -0.14557869732379913, 0.5063401460647583, -0.8244842886924744, -0.09826378524303436, -1.1594188213348389, -0.436612993478775, -0.4916422963142395, -0.3514575958251953, -0.8382286429405212, -0.3283126950263977, 0.30407705903053284, 0.22509321570396423, -0.4349185824394226, 0.5257089734077454, 
-0.6969070434570312, 0.24371859431266785, 0.6323110461235046, 0.16029947996139526, -0.005086343735456467, -0.10893335193395615, -0.09065867215394974, 0.2295503318309784, -0.5252380967140198, -0.36140528321266174, 1.3384252786636353, 0.230805441737175, 0.6491848230361938, 0.042345788329839706, 0.9770171642303467, 0.24564509093761444, 0.389909952878952, -0.43659523129463196, 0.5883841514587402, 0.0002474348875693977, -0.6168189644813538, -0.1619783341884613, -0.7235597372055054, -0.9417924880981445, 0.2521648406982422, 0.05314517021179199, -0.890311062335968, -0.0019373722607269883, 0.022965990006923676, -0.01812824420630932, 0.34093430638313293, -0.5917807817459106, 0.8466335535049438, -0.32168906927108765, -0.3452141582965851, 0.1264776736497879, -0.849963366985321, 0.41490256786346436, 0.06340744346380234, 0.3506675660610199, -0.2844945192337036, 0.018786894157528877, 1.193527102470398, -0.5837202072143555, 0.6847888827323914, -0.32506054639816284, 0.10330985486507416, 0.39018043875694275, -0.36404985189437866, 0.5618396401405334, -0.02640495076775551, -0.25024592876434326, 0.49802789092063904, -0.116142638027668, -0.27517110109329224, -0.308442622423172, 0.9797611832618713, -0.9522258639335632, -0.3342120349407196, -0.5224805474281311, -0.5087293982505798, 0.2472490817308426, 0.3109886646270752, 0.32453927397727966, 0.3815096318721771, 0.12085021287202835, 0.23991848528385162, 0.2413937747478485, -0.16407707333564758, 0.4921633005142212, 0.4381270408630371, -0.17876654863357544, -0.8526535630226135, 0.6980682611465454, 0.3030140995979309, 0.08040712028741837, 0.18167565762996674, 0.06190342828631401, -0.5436031222343445, -0.5065473318099976, -0.41446539759635925, 0.3303171396255493, -0.6158984899520874, -0.4173104166984558, -0.4445594847202301, -0.2216351181268692, -0.4444647431373596, -0.06069526448845863, -0.3251720368862152, -0.4134488105773926, -0.4569503366947174, -0.33308935165405273, 0.6323474049568176, 0.6177741885185242, -0.3791015148162842, 0.30519136786460876, -0.7495211362838745, 0.2858608067035675, -0.15399131178855896, 0.489093542098999, -0.17839953303337097, -0.5507396459579468, -0.39666110277175903, 0.14225716888904572, -0.40173131227493286, -0.8550243973731995, 0.6045014262199402, -0.00555235892534256, 0.7457271218299866, 0.10786587744951248, 0.0735333189368248, 0.8663145303726196, -0.21122987568378448, 1.0664249658584595, -0.006161230616271496, -0.7662801146507263, 0.7988312840461731, -0.3627385199069977, 0.12089475989341736, 0.5385342836380005, 0.23252099752426147, -0.44792643189430237, -0.2776826322078705, -1.0085015296936035, -1.1826547384262085, 1.0677281618118286, 0.5686396360397339, -0.36990857124328613, 0.08736030012369156, 0.36712855100631714, -0.05254846066236496, 0.24894478917121887, -0.6714624762535095, -0.7083789110183716, -0.2420358508825302, -0.2929546535015106, -0.0803605169057846, 0.041667740792036057, -0.4346481263637543, -0.4624565839767456, 0.9632654190063477, -0.04283224418759346, 0.5228832364082336, 0.19391313195228577, 0.021485796198248863, -0.08562184125185013, 0.22788237035274506, 0.4960179030895233, 0.7174040079116821, -0.4511723518371582, -0.03813493624329567, 0.24243779480457306, -0.5618892312049866, 0.07693346589803696, 0.41032102704048157, 0.03815474733710289, -0.09157741069793701, 0.6611264944076538, 1.0020490884780884, 0.05311289429664612, -0.3449344336986542, 0.530695378780365, 0.06404513120651245, -0.36087754368782043, -0.4387621283531189, 0.21764656901359558, -0.10148951411247253, 0.34654632210731506, 0.4255353808403015, 
-0.05539898946881294, 0.009039853699505329, -0.25564032793045044, 0.2964946925640106, 0.24552226066589355, -0.040862131863832474, -0.3022618591785431, 0.6263599991798401, -0.026749618351459503, -0.26723381876945496, 0.7407978773117065, -0.1332276463508606, -0.6520780920982361, 1.0912635326385498, 0.37134218215942383, 0.8810784816741943, -0.12225373089313507, 0.12102500349283218, 0.5679498314857483, 0.35888463258743286, -0.1829017847776413, 0.5952541828155518, 0.10293473303318024, -0.5847940444946289, -0.2637941241264343, -0.7825760245323181, -0.2364194691181183, 0.34906914830207825, -1.0874967575073242, 0.32212284207344055, -0.11246982216835022, -0.21530044078826904, -0.16904523968696594, 0.4660830795764923, -0.8932507634162903, 0.1921687126159668, -0.01210710033774376, 0.8764074444770813, -1.0560405254364014, 0.6557446122169495, 0.8087670207023621, -0.4387784004211426, -0.9514429569244385, -0.2556908428668976, 0.09489332884550095, -0.8199365735054016, 0.4840981066226959, 0.31730109453201294, 0.3827461898326874, -0.07940744608640671, -0.6263207793235779, -1.0338020324707031, 1.5707223415374756, 0.12157673388719559, -0.44576218724250793, 0.17578592896461487, 0.0565899983048439, 0.38930293917655945, -0.23664018511772156, 0.5818546414375305, 0.7314246892929077, 0.7797859311103821, -0.06212131306529045, -0.9506770968437195, 0.2947462201118469, -0.5500969290733337, -0.022151552140712738, 0.392566442489624, -0.8709897398948669, 1.0211513042449951, -0.09617002308368683, 0.05108245462179184, -0.0907447338104248, 0.32761362195014954, 0.6020855903625488, 0.26481813192367554, 0.44914597272872925, 0.7751792669296265, 0.6877002120018005, -0.42789822816848755, 1.094092607498169, -0.27047351002693176, 0.8901185393333435, 1.014582872390747, -0.0012556349392980337, 0.6784117221832275, 0.3369150459766388, -0.5192701816558838, 0.5338768362998962, 0.7986063361167908, -0.41585609316825867, 0.5128414630889893, 0.15930993854999542, -0.025616902858018875, 0.0549645833671093, 0.09968049079179764, -0.5498813986778259, 0.40256673097610474, 0.23071664571762085, -0.4801791310310364, -0.13138547539710999, -0.24928635358810425, 0.15866951644420624, -0.4178605079650879, -0.19908346235752106, 0.6194217801094055, -0.07880157232284546, -0.5964510440826416, 0.812451183795929, -0.0716148316860199, 0.6561891436576843, -0.7203174829483032, -0.15642687678337097, -0.21406063437461853, 0.20629103481769562, -0.5244994163513184, -0.9964196681976318, 0.21951748430728912, 0.12292744964361191, -0.16115102171897888, -0.26270121335983276, 0.5682872533798218, -0.31593748927116394, -0.5480930209159851, 0.3906441032886505, 0.3565349578857422, 0.34772953391075134, 0.14579324424266815, -0.9343209266662598, 0.32440420985221863, 0.3215658366680145, -0.8406385779380798, 0.3409343361854553, 0.28079545497894287, 0.15508723258972168, 0.5341995358467102, 0.7725145220756531, 0.23172849416732788, 0.10818701982498169, -0.08349835872650146, 1.1006499528884888, -0.7853964567184448, -0.38344806432724, -0.8366886973381042, 0.8391829133033752, -0.25743892788887024, -0.6814819574356079, 0.824148416519165, 0.9166379570960999, 0.8793171048164368, 0.16585712134838104, 0.9111208915710449, -0.530474066734314, 0.4525098502635956, -0.45458659529685974, 0.7527444958686829, -0.6623733043670654, 0.37272655963897705, -0.21333356201648712, -0.7942104339599609, -0.09792817384004593, 0.750662088394165, -0.184858500957489, 0.0554535835981369, 0.5958386063575745, 0.9158415794372559, 0.07815773785114288, 0.11011245846748352, -0.10963165014982224, 0.45906904339790344, 
0.43307945132255554, 0.5971008539199829, 0.5724637508392334, -0.6748865842819214, 0.45949336886405945, -0.6437447667121887, -0.4589478075504303, -0.19446054100990295, -0.7137975692749023, -0.7931288480758667, -0.4975714385509491, -0.342273086309433, -0.5294005870819092, -0.01606987789273262, 1.0238620042800903, 0.4732396900653839, -0.8041626811027527, -0.3855515122413635, -0.04120888188481331, 0.15720567107200623, -0.23662380874156952, -0.3638400733470917, 0.5827951431274414, -0.017800142988562584, -0.7103824615478516, 0.31231117248535156, -0.14467960596084595, -0.053992532193660736, -0.0200953371822834, -0.27783191204071045, -0.37052544951438904, -0.36105790734291077, 0.32979604601860046, 0.12903669476509094, -0.7256045341491699, -0.3365766108036041, -0.1572764366865158, -0.0053204698488116264, 0.27757081389427185, 0.2814929485321045, -0.601375937461853, 0.09117616713047028, 0.5631027221679688, 0.22724001109600067, 0.7194796800613403, 0.1003497913479805, 0.18810926377773285, -0.8395019769668579, -0.035726018249988556, 0.03264754265546799, 0.5149112343788147, 0.23901140689849854, -0.46022525429725647, 1.0432796478271484, 0.3855721652507782, -0.7738845348358154, -0.9302783608436584, -0.2604820728302002, -1.1697907447814941, -0.020736103877425194, 1.4408646821975708, -0.29359716176986694, -0.2620426416397095, 0.11799757182598114, -0.1809639036655426, 0.350801557302475, -0.6798006296157837, 0.46914803981781006, 0.7584255933761597, -0.42916321754455566, -0.0018319578375667334, -0.6900913119316101, 0.29683399200439453, -0.02517102286219597, -1.018630862236023, -0.008336632512509823, 0.33331847190856934, 0.43350398540496826, 0.21762268245220184, 0.702785313129425, -0.01881638541817665, -0.1869053989648819, -0.031008722260594368, 0.17585694789886475, -0.297325074672699, -0.136151984333992, -0.22753405570983887, 0.10268393903970718, -0.3913472592830658, -0.43702250719070435 ]
open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4-1.4-fp16
open-llm-leaderboard
2023-10-19T14:04:53Z
200
0
[ "region:us" ]
null
2023-08-18T11:21:48Z
--- pretty_name: Evaluation run of jondurbin/airoboros-13b-gpt4-1.4-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [jondurbin/airoboros-13b-gpt4-1.4-fp16](https://huggingface.co/jondurbin/airoboros-13b-gpt4-1.4-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4-1.4-fp16\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T14:04:40.493722](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4-1.4-fp16/blob/main/results_2023-10-19T14-04-40.493722.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.05285234899328859,\n\ \ \"em_stderr\": 0.0022912930700355423,\n \"f1\": 0.11820364932885902,\n\ \ \"f1_stderr\": 0.0026017641356238645,\n \"acc\": 0.41988112541310807,\n\ \ \"acc_stderr\": 0.009659506214512746\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.05285234899328859,\n \"em_stderr\": 0.0022912930700355423,\n\ \ \"f1\": 0.11820364932885902,\n \"f1_stderr\": 0.0026017641356238645\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07733131159969674,\n \ \ \"acc_stderr\": 0.007357713523222348\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803143\n\ \ }\n}\n```" repo_url: https://huggingface.co/jondurbin/airoboros-13b-gpt4-1.4-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|arc:challenge|25_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-03T11:11:18.095380.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T14_04_40.493722 path: - '**/details_harness|drop|3_2023-10-19T14-04-40.493722.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T14-04-40.493722.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T14_04_40.493722 path: - '**/details_harness|gsm8k|5_2023-10-19T14-04-40.493722.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T14-04-40.493722.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hellaswag|10_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_5 
data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T11:11:18.095380.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T11:11:18.095380.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-03T11:11:18.095380.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-03T11:11:18.095380.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_03T11_11_18.095380 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-management|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-03T11:11:18.095380.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-03T11:11:18.095380.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_03T11_11_18.095380 path: - '**/details_harness|truthfulqa:mc|0_2023-08-03T11:11:18.095380.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-03T11:11:18.095380.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T14_04_40.493722 path: - '**/details_harness|winogrande|5_2023-10-19T14-04-40.493722.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T14-04-40.493722.parquet' - config_name: results data_files: - split: 2023_08_03T11_11_18.095380 path: - results_2023-08-03T11:11:18.095380.parquet - split: 2023_10_19T14_04_40.493722 path: - results_2023-10-19T14-04-40.493722.parquet - split: latest path: - results_2023-10-19T14-04-40.493722.parquet --- # Dataset Card for Evaluation run of jondurbin/airoboros-13b-gpt4-1.4-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-13b-gpt4-1.4-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-13b-gpt4-1.4-fp16](https://huggingface.co/jondurbin/airoboros-13b-gpt4-1.4-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4-1.4-fp16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T14:04:40.493722](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4-1.4-fp16/blob/main/results_2023-10-19T14-04-40.493722.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.05285234899328859, "em_stderr": 0.0022912930700355423, "f1": 0.11820364932885902, "f1_stderr": 0.0026017641356238645, "acc": 0.41988112541310807, "acc_stderr": 0.009659506214512746 }, "harness|drop|3": { "em": 0.05285234899328859, "em_stderr": 0.0022912930700355423, "f1": 0.11820364932885902, "f1_stderr": 0.0026017641356238645 }, "harness|gsm8k|5": { "acc": 0.07733131159969674, "acc_stderr": 0.007357713523222348 }, "harness|winogrande|5": { "acc": 0.7624309392265194, "acc_stderr": 0.011961298905803143 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
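The same loading pattern should also work for a specific timestamped run or for the aggregated "results" config. Below is a minimal sketch, assuming the split names from the `data_files` listing above can be passed verbatim to `datasets.load_dataset` (the library may sanitize them differently):

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4-1.4-fp16"

# A specific run, addressed by its timestamped split (name copied from the
# data_files listing above; the exact split naming is an assumption).
winogrande_run = load_dataset(repo, "harness_winogrande_5",
                              split="2023_10_19T14_04_40.493722")

# Aggregated metrics; the "latest" split always points to the newest run.
results_latest = load_dataset(repo, "results", split="latest")
print(results_latest)
```

Any of the other `harness_*` configs listed above can be read the same way, swapping in the corresponding config name and split.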
[ -0.46527406573295593, -0.6710694432258606, 0.15051274001598358, 0.1941043883562088, -0.16174617409706116, 0.08294954150915146, -0.35694292187690735, -0.21245421469211578, 0.38538405299186707, 0.5148485898971558, -0.6578466892242432, -0.8927939534187317, -0.7205398678779602, 0.2055189460515976, -0.16229397058486938, 1.2053953409194946, -0.3317936658859253, -0.3162471055984497, 0.07729440182447433, -0.2564745545387268, -0.3117656409740448, -0.43927714228630066, -0.49042510986328125, -0.41757267713546753, 0.36382824182510376, 0.4791457951068878, 0.3893590271472931, 0.7257447838783264, 0.6338954567909241, 0.3940959572792053, -0.15277647972106934, 0.16520120203495026, -0.3966658115386963, -0.1694001853466034, 0.2570655643939972, -0.516276478767395, -0.756966769695282, 0.16495035588741302, 0.6788352131843567, 0.45064905285835266, -0.30252647399902344, 0.5309616923332214, 0.10716037452220917, 0.5739334225654602, -0.38889676332473755, 0.3837668299674988, -0.3720611333847046, -0.05050881952047348, -0.3543669581413269, -0.22555449604988098, 0.02704298123717308, -0.297471284866333, -0.11312031000852585, -0.557226300239563, 0.23123137652873993, 0.1114312931895256, 1.1313046216964722, 0.14083915948867798, -0.18690839409828186, -0.2532595098018646, -0.24100108444690704, 0.7989304065704346, -0.9164130091667175, 0.08885777741670609, 0.6442577242851257, 0.07178346067667007, -0.20691971480846405, -0.5528585910797119, -0.3220826983451843, -0.09646914899349213, -0.27179303765296936, 0.19942845404148102, -0.07191727310419083, -0.07084544748067856, 0.43153077363967896, 0.6024887561798096, -0.7650781273841858, 0.009592217393219471, -0.5979464650154114, -0.16184990108013153, 0.9064016342163086, 0.29697296023368835, 0.07003249228000641, -0.5485472679138184, -0.38095995783805847, -0.3436635434627533, -0.437237411737442, 0.17946773767471313, 0.5668023824691772, 0.46164822578430176, -0.6345488429069519, 0.7159757018089294, -0.41580846905708313, 0.6323937177658081, -0.08233758807182312, -0.26390933990478516, 0.8606758117675781, -0.622672438621521, -0.2854033410549164, 0.01713705062866211, 1.0376287698745728, 0.40900754928588867, -0.006194095127284527, 0.21736422181129456, -0.2945987284183502, -0.12714546918869019, 0.1013118326663971, -0.8145374655723572, -0.2334475815296173, 0.3413220942020416, -0.5696884393692017, -0.42810681462287903, 0.28386810421943665, -0.8981175422668457, -0.059437282383441925, -0.26798757910728455, 0.22621086239814758, -0.20859241485595703, -0.4668392539024353, -0.09717175364494324, -0.1869400143623352, 0.22231602668762207, 0.1392628401517868, -0.650230348110199, 0.38159888982772827, 0.5935102701187134, 1.0065776109695435, -0.11231725662946701, -0.4276195466518402, -0.313916951417923, -0.18837402760982513, -0.20225094258785248, 0.4919583797454834, -0.19708062708377838, -0.4161891043186188, -0.2254008948802948, 0.2740747034549713, -0.33641692996025085, -0.6198659539222717, 0.6269235014915466, -0.23296710848808289, 0.21852782368659973, -0.2518797814846039, -0.4268937110900879, -0.10709000378847122, 0.41586241126060486, -0.641843318939209, 1.3722656965255737, 0.4216565787792206, -0.8758864998817444, 0.08822988718748093, -0.806686282157898, -0.195709690451622, 0.0575452484190464, -0.03669273853302002, -0.5717669725418091, -0.17081032693386078, 0.20365628600120544, 0.5620601773262024, -0.31953269243240356, 0.13926316797733307, -0.25431302189826965, -0.3896752893924713, 0.11552409082651138, -0.15621477365493774, 1.0689020156860352, 0.21640200912952423, -0.4705027937889099, 0.10307593643665314, 
-0.9017261862754822, 0.12428510189056396, 0.2564946711063385, -0.5837159752845764, -0.16846801340579987, -0.32209697365760803, 0.09053076058626175, 0.11218845099210739, 0.4838753938674927, -0.49137887358665466, 0.37876394391059875, -0.2284555584192276, 0.30232465267181396, 1.0122817754745483, 0.04203297197818756, 0.14902448654174805, -0.46172189712524414, 0.6583379507064819, 0.013167709112167358, 0.288990318775177, 0.1264609843492508, -0.56325364112854, -0.6526656746864319, -0.23050016164779663, 0.1349961906671524, 0.6529704332351685, -0.5980187654495239, 0.7251332402229309, -0.3686941862106323, -0.7117434144020081, -0.660927414894104, 0.16382014751434326, 0.5066028237342834, 0.5194197297096252, 0.39158865809440613, -0.2290416806936264, -0.6745645403862, -0.9666486978530884, -0.020923340693116188, -0.32002168893814087, 0.0746408998966217, 0.5263757705688477, 0.9772698283195496, -0.3416406810283661, 0.5530291199684143, -0.774263858795166, -0.2899205684661865, -0.2832547426223755, 0.11645455658435822, 0.8135343194007874, 0.5182556509971619, 0.5088743567466736, -0.692480742931366, -0.352138489484787, 0.013705994002521038, -0.7506823539733887, -0.3164699673652649, -0.10697987675666809, -0.23694831132888794, 0.35554563999176025, -0.020964257419109344, -0.5234009027481079, 0.5124975442886353, 0.6213555932044983, -0.6154648661613464, 0.6869198083877563, -0.05267436429858208, 0.4543895721435547, -1.077658772468567, 0.20573106408119202, 0.10969074815511703, 0.051198504865169525, -0.4036208391189575, -0.141474649310112, -0.026379603892564774, 0.35699987411499023, -0.4001184403896332, 0.6619897484779358, -0.41483765840530396, -0.1618780940771103, 0.05072256550192833, 0.09224027395248413, -0.060257431119680405, 0.5560031533241272, -0.3037542402744293, 0.7609172463417053, 0.51365065574646, -0.33869069814682007, 0.4536256492137909, 0.46748024225234985, -0.4466042220592499, 0.3047577440738678, -0.5408368110656738, -0.07123589515686035, 0.19016455113887787, 0.1813562661409378, -0.8821926712989807, -0.40163302421569824, 0.45113605260849, -0.6091829538345337, 0.2598274350166321, -0.37624984979629517, -0.5338423848152161, -0.500702440738678, -0.5017768740653992, 0.2033030390739441, 0.4738004207611084, -0.47359591722488403, 0.26667320728302, 0.4010998606681824, -0.040194056928157806, -0.6274781823158264, -0.7133123874664307, -0.15115799009799957, -0.36011627316474915, -0.6073582768440247, 0.35805031657218933, -0.19083143770694733, -0.24188053607940674, 0.05950361117720604, -0.17427469789981842, -0.11930213868618011, 0.17706577479839325, 0.34888017177581787, 0.5987627506256104, -0.07003998011350632, -0.3949613571166992, -0.18399251997470856, -0.14280438423156738, 0.1371859908103943, 0.06431107223033905, 0.5498161911964417, -0.34682780504226685, -0.20721684396266937, -0.2378799170255661, 0.08883670717477798, 0.42765703797340393, -0.11431484669446945, 0.7853460907936096, 0.7105463147163391, -0.262582004070282, 0.010865393094718456, -0.41562262177467346, 0.05197438970208168, -0.502055823802948, 0.2860681116580963, -0.34707579016685486, -0.8076989054679871, 0.7973363995552063, 0.2639073431491852, 0.08083250373601913, 0.7477260231971741, 0.5770719051361084, 0.09061014652252197, 0.7686722874641418, 0.1543934941291809, -0.11809525638818741, 0.5153877139091492, -0.8214155435562134, -0.08754628151655197, -1.1347777843475342, -0.44008180499076843, -0.4902932643890381, -0.3530460596084595, -0.8542394638061523, -0.33090436458587646, 0.292156845331192, 0.22112463414669037, -0.41794970631599426, 0.5308358073234558, 
-0.7241780161857605, 0.22605577111244202, 0.6407675743103027, 0.14888706803321838, -0.000556182989384979, -0.09389138966798782, -0.10883401334285736, 0.20628705620765686, -0.5176852941513062, -0.34289833903312683, 1.337123155593872, 0.2044506072998047, 0.6486513614654541, 0.014967238530516624, 0.9956133365631104, 0.23667043447494507, 0.3634606897830963, -0.42989516258239746, 0.5923653244972229, -0.02835903875529766, -0.6042384505271912, -0.16477321088314056, -0.7387686967849731, -0.9528710842132568, 0.2500665485858917, 0.044759612530469894, -0.9055039286613464, 0.018502427265048027, 0.01737559586763382, -0.017379142343997955, 0.32907286286354065, -0.5720207095146179, 0.8721545934677124, -0.31499820947647095, -0.3350287973880768, 0.11304342746734619, -0.8339085578918457, 0.4160291254520416, 0.05851220339536667, 0.35644829273223877, -0.2770361006259918, 0.013361524790525436, 1.1875544786453247, -0.5844064950942993, 0.6726528406143188, -0.32000917196273804, 0.08443314582109451, 0.38562873005867004, -0.35784128308296204, 0.522663950920105, -0.02233635075390339, -0.2326025664806366, 0.4979522228240967, -0.09552042186260223, -0.2500399053096771, -0.2917812168598175, 0.9952784776687622, -0.9574408531188965, -0.32295915484428406, -0.5309730768203735, -0.4973166882991791, 0.2744768261909485, 0.31020623445510864, 0.3299405574798584, 0.40348389744758606, 0.12957008183002472, 0.23784880340099335, 0.24045155942440033, -0.20529256761074066, 0.4742242991924286, 0.43060868978500366, -0.20532722771167755, -0.8472993969917297, 0.7151167392730713, 0.3193928301334381, 0.0825054720044136, 0.21196401119232178, 0.09647779911756516, -0.5300425887107849, -0.5163103938102722, -0.4117870330810547, 0.33112475275993347, -0.58351069688797, -0.39499181509017944, -0.4451376497745514, -0.23398786783218384, -0.45732107758522034, -0.05667643994092941, -0.3396025002002716, -0.41468706727027893, -0.4337228536605835, -0.31132620573043823, 0.6180949211120605, 0.6017535924911499, -0.40555691719055176, 0.29259154200553894, -0.7514934539794922, 0.2989298701286316, -0.1560487598180771, 0.45074665546417236, -0.16700823605060577, -0.5684536099433899, -0.36957281827926636, 0.1336544305086136, -0.4017220735549927, -0.8510302901268005, 0.6131030321121216, 0.005919883493334055, 0.7441301941871643, 0.08748258650302887, 0.0763852670788765, 0.8693792819976807, -0.23798121511936188, 1.0565879344940186, -0.008277874439954758, -0.7947288751602173, 0.796634316444397, -0.36395350098609924, 0.11029145866632462, 0.529262363910675, 0.23663738369941711, -0.48517364263534546, -0.2839273512363434, -1.0237746238708496, -1.188489556312561, 1.0650161504745483, 0.5643249154090881, -0.3758886754512787, 0.07736566662788391, 0.36908408999443054, -0.05752488970756531, 0.2585006356239319, -0.647890031337738, -0.7015545964241028, -0.2395518273115158, -0.2962262034416199, -0.0845559760928154, 0.037761010229587555, -0.4108136296272278, -0.4747467339038849, 0.9513251185417175, -0.02928200550377369, 0.5427267551422119, 0.21171432733535767, 0.011677535250782967, -0.09549885988235474, 0.21651385724544525, 0.4917358458042145, 0.7420424222946167, -0.45331457257270813, -0.03318845108151436, 0.2198103666305542, -0.5561692714691162, 0.07395834475755692, 0.42601194977760315, 0.027597997337579727, -0.08109525591135025, 0.675733745098114, 0.9964613914489746, 0.07977987825870514, -0.3693352937698364, 0.5298566222190857, 0.04711132496595383, -0.35376617312431335, -0.4434569776058197, 0.19620181620121002, -0.07540328055620193, 0.3485342860221863, 0.4193556308746338, 
-0.06979987025260925, 0.017293427139520645, -0.2719835340976715, 0.32282915711402893, 0.2466466873884201, -0.02275051176548004, -0.32161375880241394, 0.635724663734436, -0.03584504872560501, -0.2798823118209839, 0.7451485395431519, -0.15336206555366516, -0.6359607577323914, 1.0841797590255737, 0.3582807183265686, 0.8482946157455444, -0.1521928459405899, 0.1386636197566986, 0.5717452764511108, 0.3737831115722656, -0.1638556271791458, 0.5991611480712891, 0.0921851098537445, -0.60069739818573, -0.27683231234550476, -0.7913116216659546, -0.22344598174095154, 0.3496449291706085, -1.0872957706451416, 0.2987145483493805, -0.1321345865726471, -0.20034417510032654, -0.13717855513095856, 0.4786132872104645, -0.876658022403717, 0.1739581674337387, -0.008731537498533726, 0.889768660068512, -1.0554511547088623, 0.6645895838737488, 0.8168143630027771, -0.4367101490497589, -0.9573292136192322, -0.2708868980407715, 0.10471717268228531, -0.8323954343795776, 0.48078033328056335, 0.3263756036758423, 0.38683608174324036, -0.09247858077287674, -0.6076667904853821, -1.0206297636032104, 1.5396413803100586, 0.12513096630573273, -0.476470023393631, 0.14441825449466705, 0.0541815385222435, 0.3795454800128937, -0.2350391447544098, 0.566387414932251, 0.7324591279029846, 0.7832103371620178, -0.06482645869255066, -0.9714369773864746, 0.29516851902008057, -0.5292958617210388, -0.04600190743803978, 0.41186201572418213, -0.8478512763977051, 1.0181715488433838, -0.08089529722929001, 0.057771749794483185, -0.07546307146549225, 0.3288729786872864, 0.6107822060585022, 0.27557000517845154, 0.4508918225765228, 0.7776055932044983, 0.6855046153068542, -0.4124111235141754, 1.085206151008606, -0.28689655661582947, 0.8880150318145752, 0.9963535070419312, 0.017530804499983788, 0.6786595582962036, 0.33818158507347107, -0.502285897731781, 0.5081005692481995, 0.7956215143203735, -0.4122905433177948, 0.5223316550254822, 0.15950724482536316, -0.00890249665826559, 0.036804620176553726, 0.10135100781917572, -0.521700918674469, 0.4071701169013977, 0.24493268132209778, -0.48934829235076904, -0.15498116612434387, -0.24484945833683014, 0.15187066793441772, -0.3866767883300781, -0.19327470660209656, 0.6179878115653992, -0.06280822306871414, -0.5941630005836487, 0.8216717839241028, -0.0579744316637516, 0.6913880705833435, -0.7152150273323059, -0.15146811306476593, -0.20588243007659912, 0.22641964256763458, -0.5099471211433411, -1.0059081315994263, 0.24292537569999695, 0.1313985139131546, -0.14042110741138458, -0.2840355336666107, 0.58815997838974, -0.34054821729660034, -0.5588694214820862, 0.42668381333351135, 0.3632121682167053, 0.35882750153541565, 0.13075809180736542, -0.9627734422683716, 0.3458232581615448, 0.34431058168411255, -0.8203160762786865, 0.3545454144477844, 0.29336297512054443, 0.16553470492362976, 0.5195850133895874, 0.7610090970993042, 0.22418077290058136, 0.11808755248785019, -0.0990067571401596, 1.0933904647827148, -0.7791522741317749, -0.363662987947464, -0.8369960784912109, 0.8279388546943665, -0.2512792646884918, -0.6876465678215027, 0.8244286775588989, 0.9121620655059814, 0.8961868286132812, 0.16759181022644043, 0.8599742650985718, -0.5383292436599731, 0.4506046175956726, -0.4532122313976288, 0.7598819136619568, -0.6779459714889526, 0.34770965576171875, -0.21682892739772797, -0.7989233136177063, -0.06547430157661438, 0.7688526511192322, -0.1765996217727661, 0.03132166340947151, 0.5814958810806274, 0.9002805948257446, 0.06097140908241272, 0.09714378416538239, -0.1012827455997467, 0.44868379831314087, 0.44668716192245483, 
0.6122443079948425, 0.5814840793609619, -0.6601759195327759, 0.4494219422340393, -0.6646435260772705, -0.4729665219783783, -0.18984797596931458, -0.7236358523368835, -0.7980997562408447, -0.5333167910575867, -0.35518190264701843, -0.5220466256141663, -0.0030336794443428516, 1.0392287969589233, 0.46321842074394226, -0.7932007908821106, -0.40803179144859314, -0.04140017181634903, 0.14115111529827118, -0.24889346957206726, -0.36128804087638855, 0.5751994848251343, -0.02037433721125126, -0.7036409974098206, 0.34931230545043945, -0.13928480446338654, -0.07437536120414734, -0.0021819877438247204, -0.28212103247642517, -0.3822580575942993, -0.3459441065788269, 0.3273239731788635, 0.12324215471744537, -0.7143681049346924, -0.3418952524662018, -0.1514415740966797, 0.026641035452485085, 0.2839984595775604, 0.2862251400947571, -0.6021977663040161, 0.07761737704277039, 0.572956383228302, 0.26623162627220154, 0.7162411212921143, 0.09493900090456009, 0.18743491172790527, -0.8381851315498352, -0.031109292060136795, 0.034437473863363266, 0.5173015594482422, 0.2468613237142563, -0.448566734790802, 1.0311696529388428, 0.40004396438598633, -0.7915117740631104, -0.9509410858154297, -0.25422802567481995, -1.1699585914611816, -0.05211161822080612, 1.424285888671875, -0.2930756211280823, -0.2722851037979126, 0.14740441739559174, -0.17125767469406128, 0.3384086489677429, -0.689933180809021, 0.4664194583892822, 0.7538087368011475, -0.4217033386230469, -0.010620301589369774, -0.6764605045318604, 0.29977595806121826, -0.02915680781006813, -1.0117135047912598, 0.00823817029595375, 0.34410423040390015, 0.44741570949554443, 0.2269655019044876, 0.6961079239845276, -0.007607934530824423, -0.19342099130153656, -0.02103995718061924, 0.1517495959997177, -0.2995010018348694, -0.1576661616563797, -0.23339782655239105, 0.08122453093528748, -0.3918863534927368, -0.42489466071128845 ]
open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.2
open-llm-leaderboard
2023-10-22T09:57:11Z
200
0
[ "region:us" ]
null
2023-08-18T11:22:30Z
--- pretty_name: Evaluation run of jondurbin/airoboros-33b-gpt4-1.2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [jondurbin/airoboros-33b-gpt4-1.2](https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.2\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T09:57:02.769369](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.2/blob/main/results_2023-10-22T09-57-02.769369.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.12531459731543623,\n\ \ \"em_stderr\": 0.003390520871377358,\n \"f1\": 0.1920878775167779,\n\ \ \"f1_stderr\": 0.0034989908215168806,\n \"acc\": 0.4364302798094512,\n\ \ \"acc_stderr\": 0.0099585816929879\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.12531459731543623,\n \"em_stderr\": 0.003390520871377358,\n\ \ \"f1\": 0.1920878775167779,\n \"f1_stderr\": 0.0034989908215168806\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0978013646702047,\n \ \ \"acc_stderr\": 0.008182119821849056\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7750591949486977,\n \"acc_stderr\": 0.011735043564126746\n\ \ }\n}\n```" repo_url: https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|arc:challenge|25_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T12:34:22.345109.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T07_36_51.791701 path: - '**/details_harness|drop|3_2023-10-22T07-36-51.791701.parquet' - split: 2023_10_22T09_57_02.769369 path: - '**/details_harness|drop|3_2023-10-22T09-57-02.769369.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T09-57-02.769369.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T07_36_51.791701 path: - '**/details_harness|gsm8k|5_2023-10-22T07-36-51.791701.parquet' - split: 2023_10_22T09_57_02.769369 path: - '**/details_harness|gsm8k|5_2023-10-22T09-57-02.769369.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T09-57-02.769369.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T12_34_22.345109 path: - 
'**/details_harness|hellaswag|10_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T12:34:22.345109.parquet' - 
'**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T12:34:22.345109.parquet' - 
'**/details_harness|hendrycksTest-global_facts|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T12:34:22.345109.parquet' - 
'**/details_harness|hendrycksTest-virology|5_2023-07-31T12:34:22.345109.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - 
split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T12:34:22.345109.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T12:34:22.345109.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T12_34_22.345109 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T12:34:22.345109.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T12:34:22.345109.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T07_36_51.791701 path: - '**/details_harness|winogrande|5_2023-10-22T07-36-51.791701.parquet' - split: 2023_10_22T09_57_02.769369 path: - '**/details_harness|winogrande|5_2023-10-22T09-57-02.769369.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T09-57-02.769369.parquet' - config_name: results data_files: - split: 2023_07_31T12_34_22.345109 path: - results_2023-07-31T12:34:22.345109.parquet - split: 2023_10_22T07_36_51.791701 path: - results_2023-10-22T07-36-51.791701.parquet - split: 2023_10_22T09_57_02.769369 path: - results_2023-10-22T09-57-02.769369.parquet - split: latest path: - results_2023-10-22T09-57-02.769369.parquet --- # Dataset Card for Evaluation run of jondurbin/airoboros-33b-gpt4-1.2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-33b-gpt4-1.2](https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T09:57:02.769369](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.2/blob/main/results_2023-10-22T09-57-02.769369.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.12531459731543623, "em_stderr": 0.003390520871377358, "f1": 0.1920878775167779, "f1_stderr": 0.0034989908215168806, "acc": 0.4364302798094512, "acc_stderr": 0.0099585816929879 }, "harness|drop|3": { "em": 0.12531459731543623, "em_stderr": 0.003390520871377358, "f1": 0.1920878775167779, "f1_stderr": 0.0034989908215168806 }, "harness|gsm8k|5": { "acc": 0.0978013646702047, "acc_stderr": 0.008182119821849056 }, "harness|winogrande|5": { "acc": 0.7750591949486977, "acc_stderr": 0.011735043564126746 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
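As a complementary illustration of the card above: the aggregated metrics shown under "Latest results" come from the `results` configuration declared in the YAML front matter, whose `latest` split points at the most recent run. The snippet below is a minimal sketch added by the editor, not part of the original card; it assumes the `datasets` library is installed, that the repo id, config name (`results`), and split name (`latest`) are exactly as listed above, and it makes no assumption about the exact column layout of the underlying parquet file.

```python
from datasets import load_dataset

# Sketch: load the aggregated "results" configuration of this details dataset.
# Repo id, config name, and split name are copied verbatim from the card above;
# inspect the returned rows before relying on any specific field names.
results = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.2",
    "results",
    split="latest",
)

print(results)     # dataset summary: number of rows and column names
print(results[0])  # first row, which holds the aggregated metrics for the latest run
```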
[ -0.4765244126319885, -0.6444140672683716, 0.14696937799453735, 0.20848341286182404, -0.1673937737941742, 0.0782986730337143, -0.3465195894241333, -0.20239077508449554, 0.3612487316131592, 0.5316309928894043, -0.6576004028320312, -0.9083267450332642, -0.7093999981880188, 0.20713010430335999, -0.15827013552188873, 1.2236284017562866, -0.33108487725257874, -0.3148103952407837, 0.08479830622673035, -0.2750179171562195, -0.32563742995262146, -0.4264398217201233, -0.5083701610565186, -0.4278958737850189, 0.3557066321372986, 0.49124816060066223, 0.38814473152160645, 0.7515980005264282, 0.6391633749008179, 0.38195809721946716, -0.14982454478740692, 0.15672308206558228, -0.4021807014942169, -0.1526225060224533, 0.2597264051437378, -0.5688304305076599, -0.7563549876213074, 0.1617797464132309, 0.6574527025222778, 0.4368566870689392, -0.30200716853141785, 0.55790776014328, 0.11512180417776108, 0.5988783836364746, -0.4008088707923889, 0.39819398522377014, -0.36396241188049316, -0.04119483754038811, -0.3931346535682678, -0.19014236330986023, 0.028985029086470604, -0.33177903294563293, -0.13606728613376617, -0.565564751625061, 0.24534204602241516, 0.13296744227409363, 1.1222721338272095, 0.1329355537891388, -0.18550065159797668, -0.23621688783168793, -0.23882947862148285, 0.8029639720916748, -0.9194104671478271, 0.06146816536784172, 0.6829673647880554, 0.09095941483974457, -0.20528024435043335, -0.5455029010772705, -0.34279850125312805, -0.06697135418653488, -0.26764553785324097, 0.21116513013839722, -0.05740944296121597, -0.0792689397931099, 0.43474081158638, 0.6059701442718506, -0.7871123552322388, 0.009375138208270073, -0.6037524938583374, -0.15804320573806763, 0.8976879119873047, 0.30444663763046265, 0.04642456769943237, -0.5521378517150879, -0.3637273907661438, -0.34209001064300537, -0.45218390226364136, 0.16417263448238373, 0.5739267468452454, 0.4524019956588745, -0.6493539810180664, 0.7296226024627686, -0.43576350808143616, 0.6454309225082397, -0.11684520542621613, -0.27301207184791565, 0.8634424805641174, -0.5864033699035645, -0.27962321043014526, -0.0036643033381551504, 1.026119589805603, 0.40094155073165894, 0.024676993489265442, 0.2235928624868393, -0.2683287262916565, -0.13480180501937866, 0.1041569858789444, -0.7915278077125549, -0.225680410861969, 0.3624412417411804, -0.5706961750984192, -0.4241692125797272, 0.29130053520202637, -0.8986856937408447, -0.08813927322626114, -0.2649984061717987, 0.18970419466495514, -0.2110547572374344, -0.45685386657714844, -0.11215129494667053, -0.1943819373846054, 0.2354256957769394, 0.14057783782482147, -0.6526427268981934, 0.3814416825771332, 0.5803741812705994, 1.012538194656372, -0.10626887530088425, -0.41408973932266235, -0.2709416151046753, -0.1691361516714096, -0.2084304243326187, 0.4851353168487549, -0.15665584802627563, -0.4187279939651489, -0.22474607825279236, 0.25255805253982544, -0.31042423844337463, -0.6185309290885925, 0.6270042657852173, -0.27389711141586304, 0.21473252773284912, -0.24527357518672943, -0.4121910631656647, -0.1196073666214943, 0.4231291115283966, -0.671777069568634, 1.3805164098739624, 0.4427514970302582, -0.875140368938446, 0.09587713330984116, -0.8358482718467712, -0.20481036603450775, 0.05718988925218582, -0.006978944875299931, -0.5861945152282715, -0.18122397363185883, 0.2023148387670517, 0.5544614791870117, -0.3550470471382141, 0.12008637189865112, -0.26926741003990173, -0.37686654925346375, 0.11481402814388275, -0.14882300794124603, 1.0623412132263184, 0.2520686984062195, -0.45295244455337524, 0.06184704974293709, 
-0.8889695405960083, 0.150384321808815, 0.27380624413490295, -0.5731984376907349, -0.15537720918655396, -0.3253403306007385, 0.09337868541479111, 0.1073739156126976, 0.4645884037017822, -0.48284435272216797, 0.38511985540390015, -0.22726406157016754, 0.2823323607444763, 0.9965768456459045, 0.031746573746204376, 0.14252203702926636, -0.46888086199760437, 0.6455239653587341, 0.007345842197537422, 0.2979601323604584, 0.14235836267471313, -0.5666712522506714, -0.6623954772949219, -0.24183399975299835, 0.12949779629707336, 0.6488698720932007, -0.5848268270492554, 0.7008981704711914, -0.3875586986541748, -0.709419846534729, -0.659631073474884, 0.18137581646442413, 0.4959421157836914, 0.5112711191177368, 0.39538952708244324, -0.20436008274555206, -0.7020343542098999, -0.976830005645752, -0.027131425216794014, -0.33691510558128357, 0.059748947620391846, 0.5009847283363342, 0.9853436946868896, -0.3187926709651947, 0.6184771656990051, -0.7919454574584961, -0.2979257106781006, -0.28491079807281494, 0.13500791788101196, 0.8059616684913635, 0.5186944603919983, 0.4923110604286194, -0.6865400671958923, -0.36409151554107666, 0.02653687447309494, -0.7816579937934875, -0.309014230966568, -0.0854182168841362, -0.21882320940494537, 0.3467314839363098, -0.029816875234246254, -0.5320999026298523, 0.49974897503852844, 0.599288284778595, -0.576988935470581, 0.6797438263893127, -0.04200403019785881, 0.44680190086364746, -1.1133564710617065, 0.19984740018844604, 0.09640492498874664, 0.04726209491491318, -0.4119640588760376, -0.14648181200027466, -0.010179704055190086, 0.3598337769508362, -0.398730605840683, 0.6566540598869324, -0.43142345547676086, -0.15901999175548553, 0.04703507572412491, 0.08637091517448425, -0.050597403198480606, 0.5501044988632202, -0.3396713137626648, 0.7673642635345459, 0.5080138444900513, -0.3349025249481201, 0.45938754081726074, 0.4664066433906555, -0.4614145755767822, 0.3245636820793152, -0.5270727276802063, -0.04454716295003891, 0.17496782541275024, 0.19358649849891663, -0.8685160279273987, -0.4207656681537628, 0.4408011734485626, -0.6065899133682251, 0.2616966962814331, -0.3825177550315857, -0.5497216582298279, -0.5379438996315002, -0.4904840886592865, 0.20203058421611786, 0.4703209400177002, -0.4681524634361267, 0.2629527449607849, 0.41117385029792786, -0.051533110439777374, -0.6181868314743042, -0.7280494570732117, -0.14493267238140106, -0.3816866874694824, -0.5810144543647766, 0.34174349904060364, -0.1617758572101593, -0.23706050217151642, 0.051788948476314545, -0.14792881906032562, -0.11338334530591965, 0.1634111851453781, 0.3535878360271454, 0.5951149463653564, -0.06364525854587555, -0.38568466901779175, -0.2025837004184723, -0.13688471913337708, 0.12940536439418793, 0.046095605939626694, 0.5455848574638367, -0.3547060489654541, -0.234711155295372, -0.22629664838314056, 0.09804506599903107, 0.46567249298095703, -0.11537958681583405, 0.7805870175361633, 0.7087164521217346, -0.2749432623386383, 0.027287418022751808, -0.44723162055015564, 0.08669032156467438, -0.49621710181236267, 0.3181617558002472, -0.35353323817253113, -0.8100697994232178, 0.8089291453361511, 0.2577783763408661, 0.06185286492109299, 0.7568715810775757, 0.5749373435974121, 0.06475433707237244, 0.7882089614868164, 0.14264671504497528, -0.14495472609996796, 0.49412304162979126, -0.8232088088989258, -0.08387278020381927, -1.1639279127120972, -0.4451524019241333, -0.49675309658050537, -0.3218219578266144, -0.8576274514198303, -0.3320081830024719, 0.293805330991745, 0.24808700382709503, -0.4398203194141388, 
0.5389832258224487, -0.6980566382408142, 0.23564252257347107, 0.627740740776062, 0.15107598900794983, -0.021276723593473434, -0.092471644282341, -0.09358105808496475, 0.22367723286151886, -0.5170125961303711, -0.3607180416584015, 1.3564155101776123, 0.21997126936912537, 0.6535072922706604, 0.048087526112794876, 0.9777999520301819, 0.24447645246982574, 0.40650656819343567, -0.4394669830799103, 0.5745968818664551, -0.00858145859092474, -0.6161324381828308, -0.16670747101306915, -0.7178297638893127, -0.9497063159942627, 0.24406863749027252, 0.05042915418744087, -0.9253202676773071, 0.002950207330286503, 0.015039524994790554, -0.02758079022169113, 0.33332493901252747, -0.5833941698074341, 0.8378796577453613, -0.3087964951992035, -0.35538119077682495, 0.12467970699071884, -0.8440044522285461, 0.4234480559825897, 0.046032581478357315, 0.36978787183761597, -0.2827957272529602, 0.026970287784934044, 1.1897178888320923, -0.5890054106712341, 0.6600998640060425, -0.3093097507953644, 0.10977274924516678, 0.39389100670814514, -0.3563823699951172, 0.5685850977897644, -0.010108966380357742, -0.24729977548122406, 0.48546475172042847, -0.12208608537912369, -0.2731773257255554, -0.3127381503582001, 0.9769991040229797, -0.963887631893158, -0.34171804785728455, -0.5143506526947021, -0.5225204825401306, 0.241082563996315, 0.31702426075935364, 0.335752010345459, 0.3763429820537567, 0.13661427795886993, 0.23516835272312164, 0.24607893824577332, -0.16449251770973206, 0.4849407374858856, 0.4343660771846771, -0.18783830106258392, -0.8625326752662659, 0.7022212147712708, 0.30098408460617065, 0.08507046103477478, 0.1825619637966156, 0.061254799365997314, -0.5542805790901184, -0.519940972328186, -0.42818483710289, 0.3248060345649719, -0.5953762531280518, -0.41294026374816895, -0.4410710036754608, -0.21948009729385376, -0.45807087421417236, -0.057100631296634674, -0.35358142852783203, -0.4005506932735443, -0.44769802689552307, -0.31805533170700073, 0.6285570859909058, 0.6253038048744202, -0.3875136077404022, 0.31372135877609253, -0.746375322341919, 0.2906376123428345, -0.14383572340011597, 0.4761843979358673, -0.17821164429187775, -0.5379534959793091, -0.4029284715652466, 0.14380604028701782, -0.39141741394996643, -0.8783658742904663, 0.6196168065071106, -0.01484945323318243, 0.7478200793266296, 0.1074063777923584, 0.070297010242939, 0.8701335191726685, -0.2121291309595108, 1.0490421056747437, -0.00015445162716787308, -0.7732667326927185, 0.7972465753555298, -0.37257158756256104, 0.1178402528166771, 0.5528344511985779, 0.22472022473812103, -0.46942371129989624, -0.288029283285141, -1.0072805881500244, -1.1846777200698853, 1.075821042060852, 0.5896723866462708, -0.38169118762016296, 0.08926001191139221, 0.36241692304611206, -0.06658252328634262, 0.24654947221279144, -0.6551163196563721, -0.7156615257263184, -0.24839386343955994, -0.2925860583782196, -0.06610123068094254, 0.040604084730148315, -0.43720707297325134, -0.4513530135154724, 0.9499613046646118, -0.04301884025335312, 0.5336892604827881, 0.20153094828128815, 0.014270566403865814, -0.07045998424291611, 0.23268607258796692, 0.49056336283683777, 0.7299569249153137, -0.462769478559494, -0.030040951445698738, 0.25185686349868774, -0.5793617367744446, 0.088888980448246, 0.41480112075805664, 0.046639684587717056, -0.10580538958311081, 0.6670059561729431, 0.9961193799972534, 0.007094530388712883, -0.3378947973251343, 0.5159982442855835, 0.06103299930691719, -0.3473660945892334, -0.44314542412757874, 0.18956850469112396, -0.08288711309432983, 0.33624395728111267, 
0.4466203451156616, -0.04965582862496376, 0.007925186306238174, -0.2558709383010864, 0.30173712968826294, 0.24023762345314026, -0.04830026999115944, -0.2906269133090973, 0.6269408464431763, -0.0533122792840004, -0.2776387631893158, 0.7606480121612549, -0.16401585936546326, -0.6538794040679932, 1.097320795059204, 0.38373905420303345, 0.8582890033721924, -0.13436445593833923, 0.12394395470619202, 0.5737220048904419, 0.3799028694629669, -0.1946886032819748, 0.5851460695266724, 0.10103827714920044, -0.5973219275474548, -0.2586743235588074, -0.783255934715271, -0.22520075738430023, 0.35478878021240234, -1.0970832109451294, 0.30111685395240784, -0.09760081022977829, -0.21801768243312836, -0.1642865240573883, 0.4692426323890686, -0.8892558813095093, 0.20991170406341553, -0.005851301830261946, 0.8801434636116028, -1.0662931203842163, 0.6754039525985718, 0.8089136481285095, -0.430367648601532, -0.9707845449447632, -0.2569940984249115, 0.11290335655212402, -0.8247168064117432, 0.4725171625614166, 0.3155471980571747, 0.39582309126853943, -0.08181016892194748, -0.6057579517364502, -1.0510876178741455, 1.5620741844177246, 0.12944380939006805, -0.4577404260635376, 0.17526283860206604, 0.08945814520120621, 0.3923446834087372, -0.23685568571090698, 0.6080450415611267, 0.7353335022926331, 0.7785759568214417, -0.06532832980155945, -0.9559218883514404, 0.3033216893672943, -0.5550365447998047, -0.01544879749417305, 0.3822177052497864, -0.8551027178764343, 1.0293396711349487, -0.10047204792499542, 0.05115259066224098, -0.08613064140081406, 0.3344579339027405, 0.6216341257095337, 0.2595888674259186, 0.4490402340888977, 0.7866188287734985, 0.7033805847167969, -0.43219056725502014, 1.108437180519104, -0.28008654713630676, 0.9120015501976013, 1.0030341148376465, 0.01430368423461914, 0.6523392796516418, 0.3378964960575104, -0.5195462107658386, 0.5028003454208374, 0.8127206563949585, -0.41388261318206787, 0.5146635174751282, 0.1735202819108963, -0.01746184006333351, 0.06240164488554001, 0.1038065254688263, -0.5412048101425171, 0.4063807427883148, 0.23109392821788788, -0.46667277812957764, -0.15342767536640167, -0.2465953379869461, 0.17193655669689178, -0.4192531406879425, -0.18971224129199982, 0.6044304370880127, -0.07758016139268875, -0.5966769456863403, 0.8162058591842651, -0.05908673256635666, 0.6655498743057251, -0.7115198969841003, -0.14579786360263824, -0.22006471455097198, 0.20689216256141663, -0.5199011564254761, -1.0179532766342163, 0.24115413427352905, 0.13346517086029053, -0.15829278528690338, -0.2651803493499756, 0.577194333076477, -0.30871009826660156, -0.5443816781044006, 0.40439701080322266, 0.3744063973426819, 0.34715333580970764, 0.12474743276834488, -0.9415714144706726, 0.3114374876022339, 0.32412290573120117, -0.840823233127594, 0.34434956312179565, 0.27083274722099304, 0.14018075168132782, 0.5367095470428467, 0.7750577330589294, 0.22871366143226624, 0.10016326606273651, -0.09573940187692642, 1.1030791997909546, -0.779388964176178, -0.378090500831604, -0.8401423096656799, 0.8345124125480652, -0.27656471729278564, -0.6982116103172302, 0.8258134126663208, 0.9400854706764221, 0.8843387365341187, 0.15919125080108643, 0.9039388298988342, -0.5445772409439087, 0.4501085877418518, -0.4419352114200592, 0.7732147574424744, -0.6668224930763245, 0.36572012305259705, -0.21162265539169312, -0.7841522097587585, -0.0904553234577179, 0.7407237887382507, -0.1857595443725586, 0.04240195453166962, 0.592165470123291, 0.9205063581466675, 0.08894167840480804, 0.11977408081293106, -0.11622115969657898, 
0.44187965989112854, 0.42742326855659485, 0.5924296975135803, 0.5849732160568237, -0.6766678094863892, 0.4489992558956146, -0.6463063359260559, -0.4593205153942108, -0.17747046053409576, -0.6994840502738953, -0.7756524682044983, -0.5174158215522766, -0.3299054503440857, -0.5279185771942139, 0.0019110870780423284, 1.0223116874694824, 0.49609804153442383, -0.806564211845398, -0.4033466577529907, -0.06596889346837997, 0.16357088088989258, -0.24787671864032745, -0.36445754766464233, 0.5727840662002563, -0.02180357091128826, -0.7023729681968689, 0.32299232482910156, -0.14300444722175598, -0.06325285881757736, 0.008950570598244667, -0.2691958248615265, -0.36642932891845703, -0.3556222915649414, 0.3341066241264343, 0.13013824820518494, -0.7089231014251709, -0.33517611026763916, -0.13960345089435577, -0.009851107373833656, 0.2876337170600891, 0.28402945399284363, -0.6000656485557556, 0.08603102713823318, 0.566401481628418, 0.22550131380558014, 0.7418296933174133, 0.09403099119663239, 0.18367543816566467, -0.824554443359375, -0.04162239283323288, 0.03533324971795082, 0.5018592476844788, 0.24094462394714355, -0.45493578910827637, 1.0452936887741089, 0.3842047452926636, -0.7673861980438232, -0.9307872653007507, -0.2503899931907654, -1.1613337993621826, -0.023604342713952065, 1.4295169115066528, -0.30739685893058777, -0.26513224840164185, 0.10147964954376221, -0.1656760722398758, 0.3642095923423767, -0.6661921739578247, 0.4760332405567169, 0.7595393061637878, -0.42698630690574646, 0.0003934128617402166, -0.7011944055557251, 0.3018777072429657, -0.022434230893850327, -1.0354528427124023, -0.003049453254789114, 0.31588417291641235, 0.4430358409881592, 0.22461092472076416, 0.6899471879005432, -0.024154460057616234, -0.20346882939338684, -0.013468788005411625, 0.1784079372882843, -0.29921191930770874, -0.13747328519821167, -0.21800459921360016, 0.11189708113670349, -0.38628292083740234, -0.4368523359298706 ]
open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4-1.2
open-llm-leaderboard
2023-10-21T15:46:21Z
200
0
[ "region:us" ]
null
2023-08-18T11:22:46Z
--- pretty_name: Evaluation run of jondurbin/airoboros-7b-gpt4-1.2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [jondurbin/airoboros-7b-gpt4-1.2](https://huggingface.co/jondurbin/airoboros-7b-gpt4-1.2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4-1.2\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T15:46:08.253072](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4-1.2/blob/main/results_2023-10-21T15-46-08.253072.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1579278523489933,\n\ \ \"em_stderr\": 0.00373459634198771,\n \"f1\": 0.21763213087248284,\n\ \ \"f1_stderr\": 0.003838141702918339,\n \"acc\": 0.3689408577089266,\n\ \ \"acc_stderr\": 0.008317600432676979\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.1579278523489933,\n \"em_stderr\": 0.00373459634198771,\n\ \ \"f1\": 0.21763213087248284,\n \"f1_stderr\": 0.003838141702918339\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.02122820318423048,\n \ \ \"acc_stderr\": 0.003970449129848635\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7166535122336227,\n \"acc_stderr\": 0.012664751735505323\n\ \ }\n}\n```" repo_url: https://huggingface.co/jondurbin/airoboros-7b-gpt4-1.2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|arc:challenge|25_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-31T14:04:39.266883.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T15_46_08.253072 path: - '**/details_harness|drop|3_2023-10-21T15-46-08.253072.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T15-46-08.253072.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T15_46_08.253072 path: - '**/details_harness|gsm8k|5_2023-10-21T15-46-08.253072.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T15-46-08.253072.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hellaswag|10_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_31T14_04_39.266883 
path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:04:39.266883.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:04:39.266883.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:04:39.266883.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:04:39.266883.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_31T14_04_39.266883 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:04:39.266883.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-31T14:04:39.266883.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_31T14_04_39.266883 path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T14:04:39.266883.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-31T14:04:39.266883.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T15_46_08.253072 path: - '**/details_harness|winogrande|5_2023-10-21T15-46-08.253072.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T15-46-08.253072.parquet' - config_name: results data_files: - split: 2023_07_31T14_04_39.266883 path: - results_2023-07-31T14:04:39.266883.parquet - split: 2023_10_21T15_46_08.253072 path: - results_2023-10-21T15-46-08.253072.parquet - split: latest path: - results_2023-10-21T15-46-08.253072.parquet --- # Dataset Card for Evaluation run of jondurbin/airoboros-7b-gpt4-1.2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-7b-gpt4-1.2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-7b-gpt4-1.2](https://huggingface.co/jondurbin/airoboros-7b-gpt4-1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4-1.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T15:46:08.253072](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4-1.2/blob/main/results_2023-10-21T15-46-08.253072.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.1579278523489933, "em_stderr": 0.00373459634198771, "f1": 0.21763213087248284, "f1_stderr": 0.003838141702918339, "acc": 0.3689408577089266, "acc_stderr": 0.008317600432676979 }, "harness|drop|3": { "em": 0.1579278523489933, "em_stderr": 0.00373459634198771, "f1": 0.21763213087248284, "f1_stderr": 0.003838141702918339 }, "harness|gsm8k|5": { "acc": 0.02122820318423048, "acc_stderr": 0.003970449129848635 }, "harness|winogrande|5": { "acc": 0.7166535122336227, "acc_stderr": 0.012664751735505323 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
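### Loading other configurations and splits

The split names used above come straight from the `configs` section of this card: every per-task configuration exposes one split per timestamped run plus a `latest` alias. The sketch below is a minimal, non-authoritative example of listing the configurations and pulling the most recent run of one of them; it assumes only that the `datasets` library is installed and that the repository is publicly readable.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_jondurbin__airoboros-7b-gpt4-1.2"

# Enumerate the per-task configurations declared in the card's YAML header.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configs, e.g. {configs[:3]}")

# "latest" aliases the newest timestamped run of this task, as declared in the configs.
details = load_dataset(repo, "harness_winogrande_5", split="latest")
print(details)      # schema and number of rows
print(details[0])   # first row of the details table
```

A timestamped split name such as `2023_10_21T15_46_08.253072` can be passed in place of `latest` to pin a specific run.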
[ -0.46826937794685364, -0.6545556783676147, 0.13766810297966003, 0.1910049170255661, -0.18980981409549713, 0.07788046449422836, -0.34594282507896423, -0.20685020089149475, 0.37542200088500977, 0.5480457544326782, -0.6509708762168884, -0.8983727693557739, -0.7301282286643982, 0.18960224092006683, -0.16480207443237305, 1.2223491668701172, -0.3251064419746399, -0.3134534955024719, 0.08025640994310379, -0.29277679324150085, -0.34719714522361755, -0.44063636660575867, -0.5149818658828735, -0.41758647561073303, 0.3832321763038635, 0.4781906306743622, 0.39719104766845703, 0.7540165781974792, 0.6320270299911499, 0.3796443045139313, -0.15973857045173645, 0.1572699397802353, -0.41126003861427307, -0.14716336131095886, 0.25096607208251953, -0.5462995171546936, -0.7461075782775879, 0.14972493052482605, 0.6790157556533813, 0.44784969091415405, -0.29026663303375244, 0.5735672116279602, 0.11109790205955505, 0.5828052163124084, -0.40040549635887146, 0.40853267908096313, -0.3625009059906006, -0.0625808984041214, -0.3801696300506592, -0.19133774936199188, 0.02575407363474369, -0.32351428270339966, -0.15465812385082245, -0.5542731881141663, 0.23707176744937897, 0.12831169366836548, 1.0962142944335938, 0.1429227888584137, -0.1909485161304474, -0.2503046989440918, -0.24341699481010437, 0.7895032167434692, -0.9219518899917603, 0.06672940403223038, 0.6512379050254822, 0.10029472410678864, -0.21949917078018188, -0.554397463798523, -0.3466390073299408, -0.06616740673780441, -0.28794625401496887, 0.20705188810825348, -0.029855843633413315, -0.0749025046825409, 0.45397815108299255, 0.5799533724784851, -0.8092324137687683, -0.005723071750253439, -0.6227222681045532, -0.15919606387615204, 0.8977015614509583, 0.29102498292922974, 0.05524851009249687, -0.5449008345603943, -0.36166638135910034, -0.35113289952278137, -0.4534585475921631, 0.180482879281044, 0.5615249872207642, 0.45447656512260437, -0.6360834240913391, 0.7198918461799622, -0.4246218800544739, 0.6236484050750732, -0.10864654928445816, -0.2718009948730469, 0.8594518303871155, -0.5799152255058289, -0.27987462282180786, 0.03970768675208092, 0.9955984950065613, 0.391497939825058, -0.00905105471611023, 0.21295581758022308, -0.2780688405036926, -0.10211115330457687, 0.08466269820928574, -0.801398754119873, -0.229544535279274, 0.3557732403278351, -0.5652952790260315, -0.41947609186172485, 0.29603391885757446, -0.8854252099990845, -0.09097844362258911, -0.2872706651687622, 0.215056374669075, -0.16929976642131805, -0.47565191984176636, -0.11005552858114243, -0.19678449630737305, 0.23871254920959473, 0.13453730940818787, -0.6475090980529785, 0.37927791476249695, 0.5936222076416016, 0.9998301863670349, -0.09516222029924393, -0.4108574390411377, -0.303361177444458, -0.16499438881874084, -0.20875275135040283, 0.4918166995048523, -0.18400457501411438, -0.42852678894996643, -0.21722307801246643, 0.2656594514846802, -0.31822776794433594, -0.6219147443771362, 0.6289105415344238, -0.26747822761535645, 0.21722806990146637, -0.2766115665435791, -0.4100167453289032, -0.1350076049566269, 0.42123904824256897, -0.6581228971481323, 1.3987174034118652, 0.43740612268447876, -0.8822748064994812, 0.09178882092237473, -0.8356974720954895, -0.19525837898254395, 0.058957718312740326, -0.01982671581208706, -0.5775832533836365, -0.17519626021385193, 0.18845605850219727, 0.5478037595748901, -0.33976584672927856, 0.12939763069152832, -0.2699701189994812, -0.39232495427131653, 0.11163441836833954, -0.1542307436466217, 1.0565495491027832, 0.22183838486671448, -0.46130257844924927, 
0.07939103990793228, -0.8951836228370667, 0.1486044079065323, 0.2724573612213135, -0.5651121139526367, -0.16035957634449005, -0.3185102343559265, 0.09887207299470901, 0.12532757222652435, 0.46192607283592224, -0.46673429012298584, 0.40190789103507996, -0.2329675704240799, 0.28248414397239685, 1.0056910514831543, 0.029903942719101906, 0.13850274682044983, -0.4620438814163208, 0.6375287771224976, 0.01501045934855938, 0.3024435043334961, 0.13456444442272186, -0.5592723488807678, -0.6728119254112244, -0.22451534867286682, 0.13564853370189667, 0.6484925746917725, -0.6029332280158997, 0.7093333601951599, -0.3845512866973877, -0.7024324536323547, -0.6722366809844971, 0.1848972886800766, 0.5118128061294556, 0.5275799036026001, 0.3943265378475189, -0.23162326216697693, -0.6759121417999268, -0.9705713987350464, -0.018663600087165833, -0.3167416453361511, 0.08548323810100555, 0.4943530261516571, 0.9788292050361633, -0.32384663820266724, 0.6031496524810791, -0.775165319442749, -0.2664230167865753, -0.30582281947135925, 0.1134633794426918, 0.8023452758789062, 0.5213086009025574, 0.5147145390510559, -0.6963664889335632, -0.3612506687641144, 0.014137058518826962, -0.777472198009491, -0.3305021822452545, -0.08868879824876785, -0.25168266892433167, 0.346462607383728, -0.04310588538646698, -0.527753472328186, 0.5261362791061401, 0.6084951162338257, -0.602362871170044, 0.6736866235733032, -0.03505139797925949, 0.45299971103668213, -1.121726632118225, 0.2076176404953003, 0.08126110583543777, 0.04268880560994148, -0.41757825016975403, -0.1492890566587448, -0.018344510346651077, 0.37413743138313293, -0.4085172116756439, 0.6823960542678833, -0.4369148313999176, -0.1714293360710144, 0.03266781195998192, 0.08968089520931244, -0.04908442124724388, 0.5570904016494751, -0.33428147435188293, 0.7574067711830139, 0.5126333832740784, -0.33213573694229126, 0.46583738923072815, 0.48004019260406494, -0.45109662413597107, 0.3052135109901428, -0.5449455976486206, -0.028221532702445984, 0.1584000438451767, 0.20160755515098572, -0.8924846649169922, -0.4227484464645386, 0.46190762519836426, -0.6050931215286255, 0.2767690122127533, -0.4009786546230316, -0.5565955638885498, -0.5202375650405884, -0.48715952038764954, 0.21803300082683563, 0.49986523389816284, -0.48895785212516785, 0.27016085386276245, 0.3948248326778412, -0.06935049593448639, -0.6174956560134888, -0.7075558304786682, -0.15651287138462067, -0.3720671832561493, -0.5916860699653625, 0.309700071811676, -0.17985686659812927, -0.23270374536514282, 0.04858487471938133, -0.15206629037857056, -0.10033266991376877, 0.16139858961105347, 0.37334755063056946, 0.5988828539848328, -0.06644245237112045, -0.384956419467926, -0.15131504833698273, -0.14794926345348358, 0.13658222556114197, 0.05553092062473297, 0.5415557026863098, -0.310031533241272, -0.24405914545059204, -0.21789468824863434, 0.1082853451371193, 0.455264151096344, -0.10766106098890305, 0.7837837934494019, 0.725311815738678, -0.2731684446334839, 0.01312352530658245, -0.4573323130607605, 0.07498660683631897, -0.4990233778953552, 0.3152593970298767, -0.3471854627132416, -0.8034462332725525, 0.8081125020980835, 0.25120916962623596, 0.07859443873167038, 0.7562794089317322, 0.5870162844657898, 0.09645421802997589, 0.7906243205070496, 0.13405898213386536, -0.13632851839065552, 0.49772536754608154, -0.8272455334663391, -0.08795561641454697, -1.1744171380996704, -0.44828298687934875, -0.4903867244720459, -0.3485240340232849, -0.8365190029144287, -0.3112213909626007, 0.3042142391204834, 0.2604514956474304, -0.4282264709472656, 
0.5361831784248352, -0.7218037247657776, 0.24049116671085358, 0.6406580805778503, 0.1537245661020279, -0.018550699576735497, -0.08249497413635254, -0.09330245852470398, 0.22613480687141418, -0.5215147733688354, -0.3576435446739197, 1.3415138721466064, 0.21231503784656525, 0.6440439224243164, 0.04984763637185097, 0.9781979918479919, 0.24778246879577637, 0.40856850147247314, -0.4209926426410675, 0.5794627666473389, -0.010249555110931396, -0.6021141409873962, -0.1547635793685913, -0.719457745552063, -0.9528323411941528, 0.2664346396923065, 0.059188395738601685, -0.9146541953086853, 0.011940176598727703, 0.035038113594055176, -0.0009336252114735544, 0.32077184319496155, -0.601779043674469, 0.864852249622345, -0.3144097924232483, -0.3530430197715759, 0.12523379921913147, -0.8621350526809692, 0.41259926557540894, 0.07364474982023239, 0.3660281002521515, -0.2792844772338867, 0.04143325611948967, 1.1997162103652954, -0.5804277062416077, 0.6764572858810425, -0.3254908621311188, 0.1321800798177719, 0.4028535485267639, -0.3561706840991974, 0.5609530210494995, -0.01140772458165884, -0.2272852659225464, 0.48447468876838684, -0.10865489393472672, -0.27848973870277405, -0.3273407518863678, 0.9895721077919006, -0.9491742253303528, -0.3329460322856903, -0.522306501865387, -0.49951472878456116, 0.2552051842212677, 0.30650097131729126, 0.33951476216316223, 0.4060322344303131, 0.14696812629699707, 0.2199891209602356, 0.247543603181839, -0.16678637266159058, 0.4791744649410248, 0.4223254919052124, -0.19508886337280273, -0.8642898797988892, 0.7145682573318481, 0.32135874032974243, 0.09280802309513092, 0.1727612167596817, 0.0800030305981636, -0.5688843131065369, -0.5176022052764893, -0.44301170110702515, 0.335427850484848, -0.6166465282440186, -0.3969753086566925, -0.4360215365886688, -0.22194555401802063, -0.4797024130821228, -0.058156345039606094, -0.3336520493030548, -0.4067763686180115, -0.4382718503475189, -0.32395467162132263, 0.6159711480140686, 0.6304725408554077, -0.37313440442085266, 0.3046434223651886, -0.7637085914611816, 0.2924098074436188, -0.1656072735786438, 0.4634118974208832, -0.15952403843402863, -0.567651629447937, -0.39654386043548584, 0.14691297709941864, -0.3902536928653717, -0.8679282069206238, 0.6201392412185669, -0.0180838480591774, 0.7526683807373047, 0.09360839426517487, 0.06485842913389206, 0.8707659244537354, -0.20403817296028137, 1.0523782968521118, 0.003901418298482895, -0.7550417184829712, 0.7931753993034363, -0.35094645619392395, 0.10617070645093918, 0.5423348546028137, 0.20660728216171265, -0.4787781834602356, -0.26239654421806335, -1.0068780183792114, -1.1879481077194214, 1.0663304328918457, 0.583106279373169, -0.3776775896549225, 0.08442070335149765, 0.38330549001693726, -0.05078037828207016, 0.26688140630722046, -0.6415449380874634, -0.712426483631134, -0.24894046783447266, -0.3013010621070862, -0.08173809200525284, 0.02138950489461422, -0.42855140566825867, -0.46208837628364563, 0.9547345042228699, -0.028907977044582367, 0.518273115158081, 0.20124076306819916, 0.0003696616040542722, -0.07573312520980835, 0.22899235785007477, 0.4946824908256531, 0.7137462496757507, -0.43529239296913147, -0.03909379988908768, 0.23755493760108948, -0.5752867460250854, 0.08106055110692978, 0.4093305468559265, 0.03516186401247978, -0.08764494955539703, 0.6718713641166687, 1.006321907043457, 0.03234457969665527, -0.34979212284088135, 0.5110132694244385, 0.07071217149496078, -0.34107762575149536, -0.4371907114982605, 0.20967571437358856, -0.0856519564986229, 0.34907132387161255, 
0.4196990728378296, -0.07030649483203888, 0.023768354207277298, -0.2681847810745239, 0.28874677419662476, 0.23366162180900574, -0.02934919483959675, -0.3157327175140381, 0.6305719017982483, -0.04333006963133812, -0.26954007148742676, 0.7524238228797913, -0.18314234912395477, -0.6613878011703491, 1.1060541868209839, 0.3726045787334442, 0.87424236536026, -0.13787539303302765, 0.1053428202867508, 0.5693443417549133, 0.36576923727989197, -0.20045027136802673, 0.5979998707771301, 0.1094084307551384, -0.5775609612464905, -0.2606402635574341, -0.7936360239982605, -0.2212609201669693, 0.3425580561161041, -1.0838102102279663, 0.3050675392150879, -0.10387592762708664, -0.20256121456623077, -0.16763640940189362, 0.4708782732486725, -0.8916718363761902, 0.1829092651605606, -0.0094626285135746, 0.8644771575927734, -1.066019058227539, 0.6648697853088379, 0.8150243163108826, -0.4315283000469208, -0.9715502262115479, -0.28830859065055847, 0.10736403614282608, -0.820915699005127, 0.47904184460639954, 0.32678815722465515, 0.40596282482147217, -0.08802197128534317, -0.6116501688957214, -1.041297197341919, 1.5702935457229614, 0.1306859403848648, -0.4653129279613495, 0.17435170710086823, 0.08201193809509277, 0.38128194212913513, -0.23072907328605652, 0.5928202867507935, 0.7295612096786499, 0.7905824780464172, -0.039278119802474976, -0.9655289053916931, 0.2934250831604004, -0.5520732402801514, -0.019460981711745262, 0.40702930092811584, -0.8586831092834473, 1.0134347677230835, -0.09124399721622467, 0.048344049602746964, -0.08555326610803604, 0.3220665454864502, 0.5986548066139221, 0.2935875356197357, 0.43985581398010254, 0.7876700758934021, 0.68610680103302, -0.42480650544166565, 1.0955427885055542, -0.2759566009044647, 0.9065811634063721, 1.0213361978530884, 0.0018082564929500222, 0.6575916409492493, 0.32501891255378723, -0.5004590153694153, 0.49529463052749634, 0.8063225746154785, -0.4064938426017761, 0.5099281668663025, 0.1706661880016327, -0.017562024295330048, 0.047374024987220764, 0.1017271876335144, -0.5490832924842834, 0.3823917508125305, 0.24277880787849426, -0.47943609952926636, -0.1503099948167801, -0.24744562804698944, 0.15530972182750702, -0.4212746322154999, -0.20521031320095062, 0.6214503645896912, -0.07511138170957565, -0.593800961971283, 0.8052247762680054, -0.06496710330247879, 0.6561362743377686, -0.7149325609207153, -0.16819632053375244, -0.21246369183063507, 0.20885996520519257, -0.5219302773475647, -0.9995125532150269, 0.2440251111984253, 0.11949361860752106, -0.16045065224170685, -0.2470557987689972, 0.5914482474327087, -0.34329283237457275, -0.5661398768424988, 0.4054788649082184, 0.36858731508255005, 0.3454189896583557, 0.12775738537311554, -0.9452791810035706, 0.3287905156612396, 0.3209739625453949, -0.842787504196167, 0.3677082061767578, 0.2696392238140106, 0.1327436864376068, 0.5248991250991821, 0.7849450707435608, 0.22224976122379303, 0.12920939922332764, -0.08043861389160156, 1.1060197353363037, -0.7711488604545593, -0.3794540464878082, -0.8287028670310974, 0.8359613418579102, -0.25838834047317505, -0.692901074886322, 0.8027513027191162, 0.9392664432525635, 0.880821704864502, 0.1609245240688324, 0.9062720537185669, -0.5264642238616943, 0.46204128861427307, -0.4537959098815918, 0.7589083313941956, -0.657153844833374, 0.3581082224845886, -0.19015292823314667, -0.8057970404624939, -0.08207698911428452, 0.7444849014282227, -0.1878834366798401, 0.04871582239866257, 0.5865967869758606, 0.9331732988357544, 0.07153904438018799, 0.07620224356651306, -0.10146558284759521, 
0.45305314660072327, 0.4425785541534424, 0.5961412191390991, 0.560017466545105, -0.6745977997779846, 0.43827390670776367, -0.6415382623672485, -0.4589283764362335, -0.1848430335521698, -0.6954809427261353, -0.8028848767280579, -0.5090190768241882, -0.33337515592575073, -0.5436817407608032, 0.020636193454265594, 1.0462301969528198, 0.46362075209617615, -0.8077576756477356, -0.3882313668727875, -0.03671152889728546, 0.16235701739788055, -0.24014738202095032, -0.36204248666763306, 0.590595006942749, -0.01032103318721056, -0.7039270997047424, 0.3084869384765625, -0.13504306972026825, -0.06244879961013794, -0.002702105324715376, -0.27908855676651, -0.3453160524368286, -0.34330490231513977, 0.3414078950881958, 0.13272404670715332, -0.7262623310089111, -0.34212297201156616, -0.1547388732433319, -0.00014651553647127002, 0.27256760001182556, 0.28740012645721436, -0.5984331369400024, 0.0953669622540474, 0.5706322193145752, 0.238763228058815, 0.7076583504676819, 0.10127826035022736, 0.1534799337387085, -0.825240969657898, -0.02545696124434471, 0.02393115870654583, 0.5192058086395264, 0.24014592170715332, -0.4504222869873047, 1.029901385307312, 0.401028037071228, -0.7705718278884888, -0.9373834729194641, -0.2501715421676636, -1.19552481174469, -0.015806667506694794, 1.447318434715271, -0.293622225522995, -0.2699893116950989, 0.11084964871406555, -0.1955244392156601, 0.3618166148662567, -0.6856290102005005, 0.4870549738407135, 0.7491530179977417, -0.4127291738986969, -0.0006288018776103854, -0.6846956014633179, 0.29572156071662903, -0.03862559050321579, -1.0236880779266357, -0.0010359376901760697, 0.3243400454521179, 0.44940605759620667, 0.21302218735218048, 0.6929521560668945, -0.006677587982267141, -0.181626096367836, -0.006830745376646519, 0.16414766013622284, -0.31800606846809387, -0.14500421285629272, -0.21768401563167572, 0.09392040967941284, -0.3846723735332489, -0.4302796423435211 ]
open-llm-leaderboard/details_TheBloke__Nous-Hermes-13B-SuperHOT-8K-fp16
open-llm-leaderboard
2023-10-22T21:25:02Z
200
0
[ "region:us" ]
null
2023-08-18T11:25:24Z
--- pretty_name: Evaluation run of TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__Nous-Hermes-13B-SuperHOT-8K-fp16\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T21:24:49.496203](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Nous-Hermes-13B-SuperHOT-8K-fp16/blob/main/results_2023-10-22T21-24-49.496203.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.24779781879194632,\n\ \ \"em_stderr\": 0.004421358038007316,\n \"f1\": 0.3203208892617463,\n\ \ \"f1_stderr\": 0.004418252169927022,\n \"acc\": 0.3825450746272229,\n\ \ \"acc_stderr\": 0.007568348592873263\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.24779781879194632,\n \"em_stderr\": 0.004421358038007316,\n\ \ \"f1\": 0.3203208892617463,\n \"f1_stderr\": 0.004418252169927022\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.012130401819560273,\n \ \ \"acc_stderr\": 0.003015294242890953\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7529597474348856,\n \"acc_stderr\": 0.012121402942855573\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|arc:challenge|25_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-01T13:07:54.585648.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T21_24_49.496203 path: - '**/details_harness|drop|3_2023-10-22T21-24-49.496203.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T21-24-49.496203.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T21_24_49.496203 path: - '**/details_harness|gsm8k|5_2023-10-22T21-24-49.496203.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T21-24-49.496203.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hellaswag|10_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-01T13:07:54.585648.parquet' - config_name: 
harness_hendrycksTest_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T13:07:54.585648.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T13:07:54.585648.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-01T13:07:54.585648.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T13:07:54.585648.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_01T13_07_54.585648 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-management|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-01T13:07:54.585648.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T13:07:54.585648.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_01T13_07_54.585648 path: - '**/details_harness|truthfulqa:mc|0_2023-08-01T13:07:54.585648.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-01T13:07:54.585648.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T21_24_49.496203 path: - '**/details_harness|winogrande|5_2023-10-22T21-24-49.496203.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T21-24-49.496203.parquet' - config_name: results data_files: - split: 2023_08_01T13_07_54.585648 path: - results_2023-08-01T13:07:54.585648.parquet - split: 2023_10_22T21_24_49.496203 path: - results_2023-10-22T21-24-49.496203.parquet - split: latest path: - results_2023-10-22T21-24-49.496203.parquet --- # Dataset Card for Evaluation run of TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__Nous-Hermes-13B-SuperHOT-8K-fp16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T21:24:49.496203](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Nous-Hermes-13B-SuperHOT-8K-fp16/blob/main/results_2023-10-22T21-24-49.496203.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.24779781879194632, "em_stderr": 0.004421358038007316, "f1": 0.3203208892617463, "f1_stderr": 0.004418252169927022, "acc": 0.3825450746272229, "acc_stderr": 0.007568348592873263 }, "harness|drop|3": { "em": 0.24779781879194632, "em_stderr": 0.004421358038007316, "f1": 0.3203208892617463, "f1_stderr": 0.004418252169927022 }, "harness|gsm8k|5": { "acc": 0.012130401819560273, "acc_stderr": 0.003015294242890953 }, "harness|winogrande|5": { "acc": 0.7529597474348856, "acc_stderr": 0.012121402942855573 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
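### Comparing successive runs

The `results` configuration of this card keeps one split per harness run (here `2023_08_01T13_07_54.585648` and `2023_10_22T21_24_49.496203`) plus a `latest` alias, so the two evaluations of this model can be lined up against each other. The snippet below is only an illustrative sketch, assuming the `datasets` and `pandas` libraries are available; the exact columns depend on what each aggregated `results_*.parquet` file contains, since the two runs did not cover the same tasks.

```python
import pandas as pd
from datasets import load_dataset

repo = "open-llm-leaderboard/details_TheBloke__Nous-Hermes-13B-SuperHOT-8K-fp16"

# One timestamped split per run of the evaluation harness, as declared in the card's configs.
runs = ["2023_08_01T13_07_54.585648", "2023_10_22T21_24_49.496203"]

frames = []
for run in runs:
    ds = load_dataset(repo, "results", split=run)   # aggregated metrics for that run
    df = ds.to_pandas()
    df["run"] = run                                 # tag rows with their run timestamp
    frames.append(df)

# Columns that only exist in one run show up as NaN in the other.
combined = pd.concat(frames, ignore_index=True)
print(combined.head())
```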
open-llm-leaderboard/details_TheBloke__llama-30b-supercot-SuperHOT-8K-fp16
open-llm-leaderboard
2023-08-27T12:33:32Z
200
0
[ "region:us" ]
null
2023-08-18T11:25:33Z
--- pretty_name: Evaluation run of TheBloke/llama-30b-supercot-SuperHOT-8K-fp16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/llama-30b-supercot-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/llama-30b-supercot-SuperHOT-8K-fp16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__llama-30b-supercot-SuperHOT-8K-fp16\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-08-01T15:49:06.725548](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__llama-30b-supercot-SuperHOT-8K-fp16/blob/main/results_2023-08-01T15%3A49%3A06.725548.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23555810672922425,\n\ \ \"acc_stderr\": 0.030884313140032385,\n \"acc_norm\": 0.2365942449124113,\n\ \ \"acc_norm_stderr\": 0.030895609482077938,\n \"mc1\": 0.2350061199510404,\n\ \ \"mc1_stderr\": 0.014843061507731608,\n \"mc2\": 0.4704454489388094,\n\ \ \"mc2_stderr\": 0.016777097412683316\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.22866894197952217,\n \"acc_stderr\": 0.012272853582540806,\n\ \ \"acc_norm\": 0.25853242320819114,\n \"acc_norm_stderr\": 0.012794553754288686\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2740489942242581,\n\ \ \"acc_stderr\": 0.004451222241494048,\n \"acc_norm\": 0.3053176658036248,\n\ \ \"acc_norm_stderr\": 0.004596006250433552\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n\ \ \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n\ \ \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.18421052631578946,\n \"acc_stderr\": 0.0315469804508223,\n\ \ \"acc_norm\": 0.18421052631578946,\n \"acc_norm_stderr\": 0.0315469804508223\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.32,\n\ \ \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \ \ \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n\ \ \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\ \ \"acc_stderr\": 0.03653946969442099,\n 
\"acc_norm\": 0.2569444444444444,\n\ \ \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.04093601807403326,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.04093601807403326\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n\ \ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n\ \ \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.21428571428571427,\n \"acc_stderr\": 0.021132859182754444,\n \"\ acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.021132859182754444\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.24838709677419354,\n \"acc_stderr\": 0.02458002892148101,\n \"\ acc_norm\": 0.24838709677419354,\n \"acc_norm_stderr\": 0.02458002892148101\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.18226600985221675,\n \"acc_stderr\": 0.02716334085964515,\n \"\ acc_norm\": 0.18226600985221675,\n \"acc_norm_stderr\": 0.02716334085964515\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.18181818181818182,\n \"acc_stderr\": 0.027479603010538794,\n \"\ acc_norm\": 0.18181818181818182,\n 
\"acc_norm_stderr\": 0.027479603010538794\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.24352331606217617,\n \"acc_stderr\": 0.030975436386845426,\n\ \ \"acc_norm\": 0.24352331606217617,\n \"acc_norm_stderr\": 0.030975436386845426\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.2205128205128205,\n \"acc_stderr\": 0.021020672680827912,\n\ \ \"acc_norm\": 0.2205128205128205,\n \"acc_norm_stderr\": 0.021020672680827912\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2222222222222222,\n \"acc_stderr\": 0.02534809746809783,\n \ \ \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.02534809746809783\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.22268907563025211,\n \"acc_stderr\": 0.02702543349888238,\n\ \ \"acc_norm\": 0.22268907563025211,\n \"acc_norm_stderr\": 0.02702543349888238\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.17880794701986755,\n \"acc_stderr\": 0.031287448506007245,\n \"\ acc_norm\": 0.17880794701986755,\n \"acc_norm_stderr\": 0.031287448506007245\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.1761467889908257,\n \"acc_stderr\": 0.01633288239343138,\n \"\ acc_norm\": 0.1761467889908257,\n \"acc_norm_stderr\": 0.01633288239343138\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.18981481481481483,\n \"acc_stderr\": 0.026744714834691926,\n \"\ acc_norm\": 0.18981481481481483,\n \"acc_norm_stderr\": 0.026744714834691926\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n\ \ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n\ \ \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.32286995515695066,\n\ \ \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.32286995515695066,\n\ \ \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\ acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\ \ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.2085889570552147,\n \"acc_stderr\": 0.03192193448934722,\n\ \ \"acc_norm\": 0.2085889570552147,\n \"acc_norm_stderr\": 0.03192193448934722\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\ \ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\ \ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n\ \ 
\"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n\ \ \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23116219667943805,\n\ \ \"acc_stderr\": 0.015075523238101091,\n \"acc_norm\": 0.23116219667943805,\n\ \ \"acc_norm_stderr\": 0.015075523238101091\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n\ \ \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\ \ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\ \ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.25163398692810457,\n \"acc_stderr\": 0.0248480182638752,\n\ \ \"acc_norm\": 0.25163398692810457,\n \"acc_norm_stderr\": 0.0248480182638752\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\ \ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\ \ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.2191358024691358,\n \"acc_stderr\": 0.0230167056402622,\n\ \ \"acc_norm\": 0.2191358024691358,\n \"acc_norm_stderr\": 0.0230167056402622\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.23049645390070922,\n \"acc_stderr\": 0.025123739226872405,\n \ \ \"acc_norm\": 0.23049645390070922,\n \"acc_norm_stderr\": 0.025123739226872405\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\ \ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\ \ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n\ \ \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n\ \ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n\ \ \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n\ \ \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\"\ : {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\ \ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\ \ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\ \ 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n\ \ \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n\ \ \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n\ \ \"acc_stderr\": 0.03582529442573122,\n 
\"acc_norm\": 0.3216374269005848,\n\ \ \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.2350061199510404,\n \"mc1_stderr\": 0.014843061507731608,\n\ \ \"mc2\": 0.4704454489388094,\n \"mc2_stderr\": 0.016777097412683316\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/llama-30b-supercot-SuperHOT-8K-fp16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|arc:challenge|25_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hellaswag|10_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:49:06.725548.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:49:06.725548.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:49:06.725548.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:49:06.725548.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-01T15:49:06.725548.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-08-01T15:49:06.725548.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_08_01T15_49_06.725548 path: - '**/details_harness|truthfulqa:mc|0_2023-08-01T15:49:06.725548.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-08-01T15:49:06.725548.parquet' - config_name: results data_files: - split: 2023_08_01T15_49_06.725548 path: - results_2023-08-01T15:49:06.725548.parquet - split: latest path: - results_2023-08-01T15:49:06.725548.parquet --- # Dataset Card for Evaluation run of TheBloke/llama-30b-supercot-SuperHOT-8K-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/llama-30b-supercot-SuperHOT-8K-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/llama-30b-supercot-SuperHOT-8K-fp16](https://huggingface.co/TheBloke/llama-30b-supercot-SuperHOT-8K-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
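As a quick illustration of the layout just described, the aggregated "results" configuration can be loaded with its "latest" split using the standard `datasets` API. This is a minimal sketch; the repository id, config name and split name are the ones listed in the configuration section above, nothing else is assumed.

```python
from datasets import load_dataset

# Load the aggregated metrics for the most recent evaluation run:
# the "results" config stores the run-level aggregates, and the
# "latest" split always points to the newest timestamped run.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__llama-30b-supercot-SuperHOT-8K-fp16",
    "results",
    split="latest",
)
print(results)
```

Any of the per-task detail configurations listed above can be loaded the same way by passing its `config_name` (for example `harness_hellaswag_10`) instead of `"results"`.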
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__llama-30b-supercot-SuperHOT-8K-fp16", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-01T15:49:06.725548](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__llama-30b-supercot-SuperHOT-8K-fp16/blob/main/results_2023-08-01T15%3A49%3A06.725548.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23555810672922425, "acc_stderr": 0.030884313140032385, "acc_norm": 0.2365942449124113, "acc_norm_stderr": 0.030895609482077938, "mc1": 0.2350061199510404, "mc1_stderr": 0.014843061507731608, "mc2": 0.4704454489388094, "mc2_stderr": 0.016777097412683316 }, "harness|arc:challenge|25": { "acc": 0.22866894197952217, "acc_stderr": 0.012272853582540806, "acc_norm": 0.25853242320819114, "acc_norm_stderr": 0.012794553754288686 }, "harness|hellaswag|10": { "acc": 0.2740489942242581, "acc_stderr": 0.004451222241494048, "acc_norm": 0.3053176658036248, "acc_norm_stderr": 0.004596006250433552 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.18421052631578946, "acc_stderr": 0.0315469804508223, "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.0315469804508223 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, 
"harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.21428571428571427, "acc_stderr": 0.021132859182754444, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.021132859182754444 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.24838709677419354, "acc_stderr": 0.02458002892148101, "acc_norm": 0.24838709677419354, "acc_norm_stderr": 0.02458002892148101 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.18226600985221675, "acc_stderr": 0.02716334085964515, "acc_norm": 0.18226600985221675, "acc_norm_stderr": 0.02716334085964515 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.18181818181818182, "acc_stderr": 0.027479603010538794, "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.027479603010538794 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.030975436386845426, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845426 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2205128205128205, "acc_stderr": 0.021020672680827912, "acc_norm": 0.2205128205128205, "acc_norm_stderr": 0.021020672680827912 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2222222222222222, "acc_stderr": 0.02534809746809783, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.02534809746809783 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.02702543349888238, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.02702543349888238 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.17880794701986755, "acc_stderr": 0.031287448506007245, "acc_norm": 0.17880794701986755, "acc_norm_stderr": 0.031287448506007245 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1761467889908257, "acc_stderr": 0.01633288239343138, "acc_norm": 0.1761467889908257, "acc_norm_stderr": 0.01633288239343138 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.18981481481481483, "acc_stderr": 0.026744714834691926, "acc_norm": 0.18981481481481483, "acc_norm_stderr": 0.026744714834691926 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, 
"harness|hendrycksTest-human_aging|5": { "acc": 0.32286995515695066, "acc_stderr": 0.03138147637575499, "acc_norm": 0.32286995515695066, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2085889570552147, "acc_stderr": 0.03192193448934722, "acc_norm": 0.2085889570552147, "acc_norm_stderr": 0.03192193448934722 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23116219667943805, "acc_stderr": 0.015075523238101091, "acc_norm": 0.23116219667943805, "acc_norm_stderr": 0.015075523238101091 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.25163398692810457, "acc_stderr": 0.0248480182638752, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.0248480182638752 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2191358024691358, "acc_stderr": 0.0230167056402622, "acc_norm": 0.2191358024691358, "acc_norm_stderr": 0.0230167056402622 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23049645390070922, "acc_stderr": 0.025123739226872405, "acc_norm": 0.23049645390070922, "acc_norm_stderr": 0.025123739226872405 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 
0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2350061199510404, "mc1_stderr": 0.014843061507731608, "mc2": 0.4704454489388094, "mc2_stderr": 0.016777097412683316 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
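
As a small usage sketch (not part of the auto-generated template above): the aggregated scores shown under "Latest results" can also be loaded directly from the `results` configuration declared in this card's YAML header, whose `latest` split points at the most recent `results_*.parquet` file. The variable names below are illustrative.

```python
from datasets import load_dataset

# Aggregated metrics for the most recent run of this model.
# The "results" configuration and its "latest" split are defined
# in the YAML header of this card.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__llama-30b-supercot-SuperHOT-8K-fp16",
    "results",
    split="latest",
)
print(results.column_names)
```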
[ -0.6994938254356384, -0.8496142029762268, 0.26652398705482483, 0.20135927200317383, -0.17579469084739685, -0.018762636929750443, 0.03275904804468155, -0.2521819472312927, 0.5966906547546387, -0.0641900822520256, -0.53865647315979, -0.6718851923942566, -0.4461277723312378, 0.2656780779361725, -0.07668980211019516, 0.8570079207420349, -0.19686682522296906, -0.13132967054843903, 0.07416290789842606, -0.006673880387097597, -0.23052017390727997, -0.35954853892326355, -0.4609712064266205, -0.3252946436405182, 0.1752208173274994, 0.461178183555603, 0.44937852025032043, 0.7794325947761536, 0.7260500192642212, 0.29455798864364624, -0.3546063005924225, 0.010191570967435837, -0.19781532883644104, -0.27610284090042114, 0.3803398609161377, -0.36896735429763794, -0.8186694383621216, 0.26644426584243774, 0.7332146763801575, 0.5657559633255005, -0.08511948585510254, 0.30186009407043457, -0.013450531288981438, 0.5132544636726379, -0.34411993622779846, 0.043916281312704086, -0.32871273159980774, 0.23664963245391846, -0.1927499622106552, -0.2555328607559204, -0.2927665710449219, -0.19265486299991608, -0.1599433422088623, -0.8989772200584412, 0.3017106056213379, 0.34063100814819336, 1.6006555557250977, -0.18280890583992004, -0.18869194388389587, 0.11830929666757584, -0.09562293440103531, 1.0235005617141724, -0.8792642951011658, 0.37659165263175964, 0.7613706588745117, 0.10964488983154297, -0.18343167006969452, -0.5935670137405396, -0.6680691838264465, 0.043597981333732605, -0.35919806361198425, 0.3571826219558716, -0.04801047593355179, -0.1881961077451706, 0.38264200091362, 0.6859551072120667, -0.6315025091171265, 0.18205663561820984, -0.6139944791793823, -0.2024414837360382, 1.079175591468811, 0.3660488426685333, 0.08989816159009933, -0.3492394983768463, -0.6938948035240173, -0.6281253695487976, -0.44582828879356384, 0.2145073562860489, 0.40268218517303467, 0.3281712830066681, -0.39940986037254333, 0.6908345818519592, -0.3869872987270355, 0.5229202508926392, 0.42995887994766235, 0.022567110136151314, 0.8451703786849976, -0.7340241074562073, -0.5326225161552429, -0.10190337151288986, 1.1090152263641357, 0.6102216839790344, 0.06674788147211075, 0.2133108377456665, 0.014707080088555813, -0.1059492900967598, -0.023787429556250572, -0.8770236372947693, -0.27453094720840454, 0.18730604648590088, -0.35330820083618164, -0.46834853291511536, 0.3041379153728485, -0.850493848323822, 0.14684738218784332, -0.012239743955433369, 0.3617340624332428, -0.5161230564117432, -0.13151651620864868, 0.1998942345380783, -0.42072996497154236, 0.8234720826148987, -0.1501438170671463, -0.8335968255996704, 0.3859407901763916, 0.5189371109008789, 0.769092321395874, -0.08031141757965088, -0.48065081238746643, -0.09186919033527374, -0.09590008109807968, -0.31473010778427124, 0.5596966743469238, -0.29019826650619507, -0.44140294194221497, -0.3131054639816284, 0.3346862494945526, -0.22681373357772827, -0.3714611530303955, 0.6777885556221008, -0.24910372495651245, 0.18853314220905304, -0.4371214807033539, -0.6596152782440186, 0.18043696880340576, 0.394275426864624, -0.39713430404663086, 1.2894623279571533, 0.2607017159461975, -0.8209273815155029, 0.4502847492694855, -0.5831397175788879, -0.14179228246212006, 0.01713106408715248, -0.0514911450445652, -0.7938482165336609, -0.2527121305465698, 0.17873920500278473, 0.4178287982940674, -0.15474259853363037, -0.10042576491832733, -0.3717159330844879, -0.3617180585861206, 0.32445576786994934, -0.1520177125930786, 1.2294577360153198, -0.04523967579007149, -0.7659485936164856, -0.10553261637687683, 
-1.2102116346359253, 0.2687985599040985, 0.24066978693008423, -0.3248584270477295, -0.1508135199546814, -0.5130120515823364, -0.006714730989187956, 0.15047428011894226, 0.2896951735019684, -0.8466798663139343, 0.30578017234802246, -0.3525182902812958, 0.19236083328723907, 1.2784054279327393, 0.02706959843635559, 0.11530052125453949, -0.5112394690513611, 0.5190461277961731, 0.21482188999652863, 0.22957992553710938, 0.36732202768325806, -0.6011423468589783, -0.8020523190498352, -0.4797237813472748, -0.01927490532398224, 0.5929088592529297, -0.2232237011194229, 1.1007890701293945, 0.092839814722538, -0.896731972694397, -0.4497886300086975, -0.1194959431886673, 0.5276702046394348, 0.7590928673744202, 0.6284111142158508, -0.07145467400550842, -0.6408628225326538, -1.1014362573623657, -0.2748468518257141, -0.15351879596710205, 0.14054061472415924, 0.2220037579536438, 1.0547467470169067, -0.28641459345817566, 0.5678020715713501, -1.0056511163711548, -0.19051887094974518, 0.13273431360721588, -0.09220024943351746, 0.7920800447463989, 0.7253455519676208, 0.562399685382843, -0.672307550907135, -0.5309931635856628, 0.22004011273384094, -0.8604965806007385, -0.09771734476089478, 0.11392008513212204, -0.3075047731399536, 0.15244701504707336, 0.14940877258777618, -0.7257890701293945, 0.5232431888580322, 0.21651971340179443, -1.0611215829849243, 1.1081879138946533, -0.31921228766441345, 0.571807861328125, -0.9747253060340881, 0.16153359413146973, -0.06755669414997101, 0.0427188016474247, -0.5024771690368652, 0.082117959856987, 0.1136120855808258, 0.4707055389881134, -0.5494335889816284, 0.7941107153892517, -0.6804522275924683, -0.05935642495751381, 0.4626421332359314, 0.11657720804214478, -0.09642314910888672, 0.3406948149204254, -0.20841023325920105, 0.8074138760566711, 0.7508824467658997, -0.4234309494495392, 0.5397789478302002, 0.4206002950668335, -0.20937825739383698, 0.7165854573249817, -0.483744353055954, -0.2746413052082062, 0.3323330581188202, -0.010223199613392353, -0.8380254507064819, -0.47893819212913513, 0.018429910764098167, -0.5907959342002869, -0.10386034846305847, 0.4084019064903259, -0.2874706983566284, -0.7967855334281921, -0.9473867416381836, 0.3360300660133362, 0.6528611779212952, -0.4048875570297241, -0.1998373419046402, 0.05867713689804077, 0.10741198062896729, -0.8015396595001221, -0.8821734189987183, -0.4494803547859192, -0.19211100041866302, -0.6634640693664551, 0.3450620770454407, -0.28184905648231506, -0.24484720826148987, -0.10818156599998474, -0.23759447038173676, -0.30219709873199463, -0.021287431940436363, 0.13640166819095612, 0.7101654410362244, -0.3871186673641205, -0.31805920600891113, -0.25753071904182434, -0.19386357069015503, 0.19318203628063202, -0.09842624515295029, 0.3757454752922058, -0.4599565863609314, -0.3769679069519043, -0.47060927748680115, -0.014363903552293777, 0.7159262299537659, -0.050763655453920364, 0.7510775923728943, 0.4175659716129303, -0.29427599906921387, 0.021831747144460678, -0.29896825551986694, -0.2846974730491638, -0.5783182978630066, 0.2738916873931885, -0.496683269739151, -1.0525838136672974, 0.805464506149292, 0.5738770365715027, 0.07888932526111603, 1.1469279527664185, 0.6114757061004639, -0.29997482895851135, 1.0112357139587402, 0.040676653385162354, 0.3361184895038605, 0.4156534671783447, -0.7482806444168091, 0.10958953201770782, -0.9508548974990845, -0.33799633383750916, -0.5479652881622314, -0.490046888589859, -0.701463520526886, -0.08695860207080841, 0.24030917882919312, 0.15042711794376373, -0.7002864480018616, 0.6325626373291016, 
-0.8245046138763428, 0.6051996350288391, 0.5949156880378723, 0.25463423132896423, 0.173907071352005, -0.16837017238140106, -0.426253080368042, -0.12641164660453796, -0.47132232785224915, -0.21764899790287018, 1.2334887981414795, 0.25423458218574524, 0.6983657479286194, 0.0448690690100193, 0.8844882845878601, 0.07147357612848282, -0.03736543282866478, -0.549788773059845, 0.685247004032135, 0.08923265337944031, -0.8187354207038879, -0.4203033149242401, -0.5118887424468994, -1.0691133737564087, 0.3627265393733978, -0.1314980685710907, -0.8641626834869385, 0.1156231164932251, 0.04023431986570358, -0.1956264078617096, 0.4852922558784485, -0.5795062780380249, 0.846815288066864, -0.15640336275100708, -0.48769575357437134, 0.10750197619199753, -0.8487585783004761, 0.4416797459125519, 0.21395693719387054, 0.2544422447681427, 0.04039108380675316, 0.24884457886219025, 1.1804903745651245, -0.8338353633880615, 0.416407972574234, 0.07804667204618454, 0.04895839840173721, 0.32488957047462463, -0.15433651208877563, 0.5111107230186462, 0.055817268788814545, 0.0011335575254634023, -0.04699470475316048, 0.30796268582344055, -0.851824939250946, -0.07038354128599167, 0.9259704351425171, -0.9643985629081726, -0.6172517538070679, -0.8378287553787231, -0.5395646095275879, 0.07859093695878983, 0.5738344192504883, 0.39898696541786194, 0.5215023756027222, 0.05872385948896408, 0.4601818919181824, 0.8020337820053101, -0.12582720816135406, 0.5880720615386963, 0.24845372140407562, 0.05851823464035988, -0.6477314233779907, 0.8548762202262878, 0.1355695128440857, 0.3934272825717926, 0.29649844765663147, 0.4149324595928192, -0.5486549139022827, -0.23760464787483215, -0.22844870388507843, 0.4920453727245331, -0.610174298286438, -0.29963138699531555, -0.3426755666732788, -0.39303824305534363, -0.7621691226959229, -0.6417160034179688, -0.31093069911003113, -0.5097901225090027, -0.49055787920951843, -0.5377465486526489, 0.6056976318359375, 0.45567309856414795, -0.36088886857032776, 0.06738273054361343, -0.5030385851860046, 0.26256465911865234, 0.3337634205818176, 0.5313677787780762, -0.38147372007369995, -0.5869383811950684, 0.03640260919928551, -0.13248789310455322, -0.5967226028442383, -0.9393657445907593, 0.34779247641563416, -0.03273514658212662, 0.5191046595573425, 0.5613897442817688, 0.040615856647491455, 0.8864694833755493, -0.21930401027202606, 1.0930590629577637, 0.36731380224227905, -0.8109989762306213, 0.743810772895813, -0.3023609519004822, 0.1776762455701828, 0.6449434161186218, 0.18668484687805176, -0.1917516440153122, -0.7319470047950745, -1.3534324169158936, -0.8258086442947388, 0.6432932615280151, 0.38025015592575073, -0.2565920352935791, 0.022520728409290314, 0.12993395328521729, -0.28646615147590637, -0.17444558441638947, -0.6826757192611694, -0.8423762321472168, -0.1583593338727951, -0.4850834608078003, 0.10340172052383423, 0.05285842716693878, -0.38856256008148193, -0.8245673179626465, 0.9076054096221924, 0.015836698934435844, 0.5764873623847961, 0.49629953503608704, 0.04940800741314888, 0.06500446051359177, 0.45305338501930237, 0.914257824420929, 0.7323215007781982, -0.4589504897594452, 0.43066415190696716, 0.4445875585079193, -1.0615557432174683, 0.47092801332473755, 0.277650386095047, -0.07639434933662415, -0.06080581620335579, 0.5109630823135376, 0.42739009857177734, 0.07974732667207718, -0.24450016021728516, 0.6208608150482178, -0.00043162706424482167, -0.5721122622489929, -0.292206346988678, 0.10268572717905045, -0.11410485208034515, 0.01598736084997654, 0.39307793974876404, -0.20839159190654755, 
-0.05551065504550934, -0.5090644955635071, 0.4886552691459656, 0.3821622133255005, -0.4646609425544739, -0.16798622906208038, 0.7400073409080505, -0.18030615150928497, -0.19938653707504272, 0.33009225130081177, -0.18185526132583618, -0.6216309666633606, 1.1545584201812744, 0.6251903772354126, 0.6792309284210205, -0.23925243318080902, -0.059919748455286026, 0.8489903211593628, 0.3880246579647064, -0.019670428708195686, 0.4987887442111969, 0.29746779799461365, -0.26664650440216064, 0.15926380455493927, -0.8838364481925964, -0.09276356548070908, 0.15721432864665985, -0.7738288044929504, 0.30946579575538635, -0.5365725755691528, -0.18665896356105804, 0.014673489145934582, 0.4262242913246155, -0.4621654748916626, 0.5510896444320679, -0.4204787015914917, 1.2540487051010132, -0.9826471209526062, 0.674640417098999, 0.7307565808296204, -0.5481536388397217, -1.0486657619476318, -0.539247453212738, 0.014639614149928093, -0.8490814566612244, 0.5695883631706238, -0.03510354831814766, 0.16584090888500214, -0.07473810017108917, -0.7467958331108093, -0.8856773376464844, 1.4081590175628662, -0.018116120249032974, -0.48732706904411316, 0.2548023462295532, -0.0733862891793251, 0.4805123805999756, 0.12350005656480789, 0.6468139290809631, 0.7642861008644104, 0.7883748412132263, -0.06390085816383362, -0.7584710717201233, 0.32860127091407776, -0.49296754598617554, -0.2995472550392151, 0.44933050870895386, -0.9686968326568604, 1.1565676927566528, -0.004756698850542307, 0.18862509727478027, -0.17840313911437988, 0.6373400688171387, 0.8288747072219849, 0.28318309783935547, 0.3413390815258026, 0.9169818758964539, 0.8016313910484314, -0.4835546612739563, 1.018192172050476, -0.22730404138565063, 0.9039743542671204, 0.6850597858428955, 0.2085917592048645, 0.7948142290115356, 0.688981831073761, -0.6131433844566345, 0.5769593715667725, 0.818638265132904, -0.29865002632141113, 0.38860997557640076, 0.2635010778903961, -0.08564960211515427, -0.12452080100774765, 0.3949512243270874, -0.8774408102035522, 0.09845028072595596, 0.07850711047649384, -0.33416447043418884, 0.10198570787906647, -0.46281787753105164, 0.3116675019264221, -0.12580814957618713, -0.062007635831832886, 0.370059072971344, 0.03535885736346245, -0.4182005822658539, 0.9399292469024658, -0.1989831179380417, 0.761394739151001, -0.5262016654014587, -0.07430552691221237, -0.4223880469799042, 0.5527806878089905, -0.46457093954086304, -1.0746560096740723, 0.18013906478881836, 0.07573167234659195, -0.09703920036554337, -0.19596943259239197, 0.6913557052612305, -0.1663568764925003, -0.7928669452667236, 0.14528492093086243, 0.024957802146673203, 0.11281946301460266, 0.5458487272262573, -0.6826878786087036, -0.31585007905960083, -0.0646328330039978, -0.5708346962928772, 0.13846826553344727, 0.29880890250205994, 0.2855503261089325, 0.5315978527069092, 0.6704882979393005, 0.16552884876728058, 0.40431585907936096, -0.5252121090888977, 0.8197908997535706, -1.0612967014312744, -0.7093408107757568, -0.9390411376953125, 0.46254608035087585, -0.3343040347099304, -0.8950955271720886, 1.0005220174789429, 1.0342382192611694, 0.8857125043869019, 0.0008609227370470762, 0.6039426922798157, -0.37236517667770386, 0.25158169865608215, -0.37666192650794983, 0.8887980580329895, -0.8488327264785767, -0.2030053734779358, -0.27663692831993103, -0.6936721801757812, -0.41166821122169495, 0.8202265501022339, -0.17810240387916565, 0.008935447782278061, 1.045938491821289, 0.7114585041999817, -0.09841137379407883, 0.061877720057964325, -0.04182678833603859, 0.6094653606414795, 
0.37921878695487976, 0.9992609620094299, 0.6747158169746399, -0.7729157209396362, 0.34374797344207764, -0.4858267307281494, -0.44202080368995667, -0.4244155287742615, -0.4779970943927765, -0.8093069791793823, -0.44630536437034607, -0.21713531017303467, -0.591886043548584, -0.12012122571468353, 0.9596894979476929, 0.474249005317688, -0.9662949442863464, -0.4072347581386566, -0.17387601733207703, 0.17164769768714905, -0.582783043384552, -0.4166904389858246, 0.7139174938201904, -0.09386270493268967, -0.5323379635810852, 0.16321821510791779, -0.0677308738231659, 0.21320417523384094, 0.0809854194521904, -0.38934454321861267, -0.7313911318778992, 0.023285141214728355, 0.43953272700309753, 0.3403955399990082, -0.7016764283180237, -0.7314540147781372, 0.3471025824546814, -0.4854625463485718, 0.4125581979751587, -0.02133849635720253, -0.4953206479549408, 0.05116306617856026, 0.6744473576545715, 0.4825030267238617, 0.6848506331443787, -0.07081080228090286, 0.059676457196474075, -0.6627776026725769, 0.15542221069335938, -0.010933753103017807, 0.2849559783935547, -0.03889372944831848, -0.3284183442592621, 0.7584455013275146, 0.7154364585876465, -0.5496589541435242, -1.1051030158996582, -0.41402819752693176, -1.4017716646194458, -0.010485116392374039, 1.118101716041565, -0.015447048470377922, -0.48904839158058167, 0.24277758598327637, -0.14298005402088165, 0.1819937527179718, -0.29887059330940247, 0.7390219569206238, 0.7996334433555603, -0.406870573759079, 0.07293687760829926, -0.6141289472579956, 0.33237335085868835, 0.5398465991020203, -1.1747294664382935, -0.06562834233045578, 0.227163165807724, 0.30402618646621704, 0.3878122866153717, 0.6429041624069214, -0.09075100719928741, 0.27159303426742554, 0.2513781785964966, 0.01655823551118374, 0.014763673767447472, 0.025764141231775284, -0.1948009878396988, 0.07710536569356918, -0.2636576294898987, -0.4370618760585785 ]
open-llm-leaderboard/details_TheBloke__koala-13B-HF
open-llm-leaderboard
2023-10-22T08:43:50Z
200
0
[ "region:us" ]
null
2023-08-18T11:28:27Z
--- pretty_name: Evaluation run of TheBloke/koala-13B-HF dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [TheBloke/koala-13B-HF](https://huggingface.co/TheBloke/koala-13B-HF) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__koala-13B-HF\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T08:43:38.346498](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__koala-13B-HF/blob/main/results_2023-10-22T08-43-38.346498.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.021707214765100673,\n\ \ \"em_stderr\": 0.0014923686874006184,\n \"f1\": 0.09106753355704705,\n\ \ \"f1_stderr\": 0.0020580604985252385,\n \"acc\": 0.40428250097386687,\n\ \ \"acc_stderr\": 0.009634029824810052\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.021707214765100673,\n \"em_stderr\": 0.0014923686874006184,\n\ \ \"f1\": 0.09106753355704705,\n \"f1_stderr\": 0.0020580604985252385\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06823351023502654,\n \ \ \"acc_stderr\": 0.006945358944067431\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7403314917127072,\n \"acc_stderr\": 0.012322700705552673\n\ \ }\n}\n```" repo_url: https://huggingface.co/TheBloke/koala-13B-HF leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|arc:challenge|25_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T18:49:04.838102.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T08_43_38.346498 path: - '**/details_harness|drop|3_2023-10-22T08-43-38.346498.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T08-43-38.346498.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T08_43_38.346498 path: - '**/details_harness|gsm8k|5_2023-10-22T08-43-38.346498.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T08-43-38.346498.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hellaswag|10_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:49:04.838102.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:49:04.838102.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:49:04.838102.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:49:04.838102.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T18_49_04.838102 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:49:04.838102.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:49:04.838102.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T18_49_04.838102 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:49:04.838102.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:49:04.838102.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T08_43_38.346498 path: - '**/details_harness|winogrande|5_2023-10-22T08-43-38.346498.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T08-43-38.346498.parquet' - config_name: results data_files: - split: 2023_07_19T18_49_04.838102 path: - results_2023-07-19T18:49:04.838102.parquet - split: 2023_10_22T08_43_38.346498 path: - results_2023-10-22T08-43-38.346498.parquet - split: latest path: - results_2023-10-22T08-43-38.346498.parquet --- # Dataset Card for Evaluation run of TheBloke/koala-13B-HF ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/koala-13B-HF - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/koala-13B-HF](https://huggingface.co/TheBloke/koala-13B-HF) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__koala-13B-HF", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T08:43:38.346498](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__koala-13B-HF/blob/main/results_2023-10-22T08-43-38.346498.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.021707214765100673, "em_stderr": 0.0014923686874006184, "f1": 0.09106753355704705, "f1_stderr": 0.0020580604985252385, "acc": 0.40428250097386687, "acc_stderr": 0.009634029824810052 }, "harness|drop|3": { "em": 0.021707214765100673, "em_stderr": 0.0014923686874006184, "f1": 0.09106753355704705, "f1_stderr": 0.0020580604985252385 }, "harness|gsm8k|5": { "acc": 0.06823351023502654, "acc_stderr": 0.006945358944067431 }, "harness|winogrande|5": { "acc": 0.7403314917127072, "acc_stderr": 0.012322700705552673 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
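The card above describes an aggregate "results" configuration whose "latest" split always points at the newest run. A minimal loading sketch for that aggregate view is given below; it assumes only the repository id, config name, and split alias listed in the YAML configs above, and makes no assumption about the exact columns of the aggregated table.

```python
from datasets import load_dataset

# The "results" config aggregates the metrics of each run; "latest" is an
# alias for the most recent timestamped split (see the YAML configs above).
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__koala-13B-HF",
    "results",
    split="latest",
)

# Inspect whatever columns this particular run produced.
print(results.column_names)
print(results[0])
```

The same pattern works for any of the per-task configurations listed in the YAML above, for example "harness_winogrande_5".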
[ -0.3914225399494171, -0.7976207733154297, 0.18157045543193817, 0.3633158504962921, -0.18883436918258667, 0.20217680931091309, -0.30855193734169006, -0.2259659618139267, 0.445546418428421, 0.5513926148414612, -0.6826622486114502, -0.8946661949157715, -0.6551975607872009, 0.29117244482040405, -0.08868137001991272, 1.0073496103286743, -0.19685009121894836, -0.37093889713287354, 0.012272619642317295, -0.3287159502506256, -0.49639904499053955, -0.5303539037704468, -0.4582003653049469, -0.5193580389022827, 0.43831995129585266, 0.6031063199043274, 0.40684640407562256, 0.6829342246055603, 0.5727007985115051, 0.3736065626144409, -0.2151123434305191, 0.23013091087341309, -0.40196719765663147, -0.024234281852841377, 0.18891334533691406, -0.5591832399368286, -0.6481618285179138, -0.00664864107966423, 0.6863162517547607, 0.4911324977874756, -0.22463247179985046, 0.6172839403152466, 0.09618422389030457, 0.6287489533424377, -0.59796541929245, 0.44474777579307556, -0.3877815902233124, 0.020736033096909523, -0.3420889973640442, -0.1410418301820755, -0.09169449657201767, -0.35801297426223755, -0.3147660493850708, -0.6398739814758301, 0.014410625211894512, 0.18531976640224457, 1.142173409461975, 0.06220758333802223, -0.2218172699213028, -0.2704809010028839, -0.15950968861579895, 0.895368218421936, -0.982376754283905, -0.04657089337706566, 0.8238762617111206, 0.08888460695743561, -0.31919586658477783, -0.5020481944084167, -0.336518794298172, -0.08058483153581619, -0.31997519731521606, 0.12284399569034576, 0.003064708085730672, -0.17449617385864258, 0.3570949137210846, 0.6259963512420654, -0.6385785341262817, 0.01335250586271286, -0.5270549058914185, -0.3581635355949402, 1.0133535861968994, 0.4060418903827667, 0.04788584262132645, -0.5311256647109985, -0.22881226241588593, -0.2960262894630432, -0.4003152549266815, 0.3060208857059479, 0.4597029983997345, 0.560337245464325, -0.6039478778839111, 0.8758062720298767, -0.5277289748191833, 0.5483415126800537, 0.018865780904889107, -0.28069913387298584, 0.8468806147575378, -0.6478306651115417, -0.22686433792114258, 0.16492922604084015, 1.0943701267242432, 0.3938829004764557, 0.010583707131445408, 0.18418844044208527, -0.22095994651317596, -0.06449826806783676, 0.030476083979010582, -0.8375247120857239, -0.16349749267101288, 0.39174869656562805, -0.5339977145195007, -0.45675423741340637, 0.11271359771490097, -0.9555917382240295, -0.17476411163806915, -0.25352442264556885, 0.24707433581352234, -0.22198602557182312, -0.5053871870040894, -0.11360310763120651, -0.07197370380163193, 0.21825358271598816, 0.04375055432319641, -0.6491886377334595, 0.3446202278137207, 0.543372631072998, 1.0267175436019897, 0.02548915147781372, -0.3899317681789398, -0.43748000264167786, -0.1861070692539215, -0.36173495650291443, 0.44654789566993713, -0.32711261510849, -0.5360487699508667, -0.14445051550865173, 0.40922486782073975, -0.3211870789527893, -0.6951419711112976, 0.6473173499107361, -0.3018200397491455, 0.005401191767305136, -0.37423035502433777, -0.4152844250202179, -0.18821336328983307, 0.3937804698944092, -0.6500110626220703, 1.5070557594299316, 0.4265022277832031, -0.8650655746459961, 0.15210947394371033, -0.9271683096885681, -0.34340354800224304, 0.019228262826800346, 0.15856003761291504, -0.698304295539856, -0.1999090015888214, 0.1406468152999878, 0.4609103202819824, -0.1317354142665863, 0.05081162974238396, -0.4039098620414734, -0.37336254119873047, 0.1458883136510849, 0.0692395567893982, 1.1433004140853882, 0.28032973408699036, -0.37017399072647095, 0.10199892520904541, 
-1.035882830619812, 0.17052696645259857, 0.3812975287437439, -0.6169816255569458, -0.28741833567619324, -0.35832175612449646, 0.13785818219184875, 0.09344176203012466, 0.528657853603363, -0.5884013772010803, 0.37060341238975525, -0.14892059564590454, 0.3723939061164856, 0.8806577920913696, 0.07533814758062363, 0.3434891104698181, -0.36769410967826843, 0.5198895335197449, 0.11740235984325409, 0.31204095482826233, 0.03444689139723778, -0.5739530920982361, -0.6668644547462463, -0.23897458612918854, 0.21378366649150848, 0.659087598323822, -0.36455389857292175, 0.7802576422691345, -0.2532697319984436, -0.6748344898223877, -0.7534802556037903, 0.20084713399410248, 0.38289764523506165, 0.525314211845398, 0.3626548945903778, -0.20846101641654968, -0.7573404908180237, -0.9661826491355896, 0.04639992117881775, -0.18876013159751892, 0.20640569925308228, 0.5045806765556335, 1.017159104347229, -0.3725818693637848, 0.7562121748924255, -0.6207151412963867, -0.2779470682144165, -0.18352031707763672, 0.15664570033550262, 0.6944636106491089, 0.5851824879646301, 0.42824113368988037, -0.6405021548271179, -0.419924259185791, 0.09138904511928558, -0.9011919498443604, -0.2898944020271301, -0.06561388820409775, -0.34973254799842834, 0.38981205224990845, -0.15056808292865753, -0.683678925037384, 0.5987625122070312, 0.45978042483329773, -0.6068063378334045, 0.7702507972717285, -0.05421294644474983, 0.4229390621185303, -1.134890079498291, 0.13571065664291382, -0.1073252335190773, 0.02182389236986637, -0.3390352725982666, -0.08830712735652924, 0.0654456838965416, 0.22545602917671204, -0.31445273756980896, 0.6471116542816162, -0.5065604448318481, -0.26359081268310547, 0.011355940252542496, 0.05287689343094826, -0.06349081546068192, 0.5343927145004272, -0.2666274607181549, 0.7930129170417786, 0.5614234209060669, -0.3732866644859314, 0.39032530784606934, 0.5557671189308167, -0.5867238640785217, 0.211784228682518, -0.47287115454673767, 0.05925356596708298, 0.29996636509895325, 0.15541625022888184, -0.8290265798568726, -0.45471084117889404, 0.5330348610877991, -0.5710882544517517, 0.1899520754814148, -0.2535059452056885, -0.6842204928398132, -0.49930015206336975, -0.413810133934021, 0.26905012130737305, 0.5520247220993042, -0.4010399878025055, 0.3133648633956909, 0.32087311148643494, -0.10596015304327011, -0.675046980381012, -0.7167441248893738, 0.0225304514169693, -0.3768869936466217, -0.6040202379226685, 0.23639990389347076, -0.1527782529592514, -0.2548055052757263, 0.04327912628650665, -0.04601162299513817, -0.092017263174057, 0.17188283801078796, 0.388639897108078, 0.4412328600883484, -0.12367232143878937, -0.30383986234664917, -0.12987080216407776, 0.007943138480186462, 0.09704992175102234, 0.23275138437747955, 0.6248005032539368, -0.25838372111320496, -0.1973934918642044, -0.2837029993534088, 0.24763672053813934, 0.5032486319541931, -0.2691306471824646, 0.8634967803955078, 0.6572223901748657, -0.22210021317005157, 0.029486818239092827, -0.39422398805618286, 0.04580022394657135, -0.47905054688453674, 0.3119124174118042, -0.3686571717262268, -0.7479692697525024, 0.9110218286514282, 0.23106838762760162, 0.05289671570062637, 0.6141923666000366, 0.5918610095977783, 0.10267679393291473, 0.868163526058197, 0.25643837451934814, -0.10869940370321274, 0.48473960161209106, -0.7312602400779724, -0.010557749308645725, -1.0735762119293213, -0.4817609190940857, -0.4754641354084015, -0.303810179233551, -0.7635544538497925, -0.17861659824848175, 0.1654001623392105, 0.3280523121356964, -0.532453179359436, 0.5160390138626099, 
-0.6620813012123108, 0.1699046492576599, 0.7214788794517517, 0.2950666844844818, 0.09060073643922806, -0.02316269464790821, -0.14973846077919006, 0.3185186982154846, -0.54546058177948, -0.4726574420928955, 1.3729077577590942, 0.20031125843524933, 0.5947834849357605, -0.10622730106115341, 0.9942633509635925, 0.3298344016075134, 0.3470058739185333, -0.6321664452552795, 0.6303964853286743, 0.1670277863740921, -0.65127032995224, -0.2805024981498718, -0.5277456641197205, -0.9434035420417786, 0.18254484236240387, -0.06222853809595108, -0.9530056118965149, 0.13115659356117249, 0.037371620535850525, -0.048868197947740555, 0.3109685480594635, -0.6244742274284363, 0.884060263633728, -0.18536794185638428, -0.4794680178165436, 0.014927552081644535, -0.8014254570007324, 0.44167226552963257, -0.09736354649066925, 0.4726007878780365, -0.2893703281879425, 0.012021427042782307, 1.0906739234924316, -0.7775665521621704, 0.7999972701072693, -0.22854390740394592, 0.0944565162062645, 0.557601809501648, -0.26328980922698975, 0.643574059009552, -0.1031038910150528, -0.3532401919364929, 0.47513559460639954, -0.004106868524104357, -0.41879621148109436, -0.24160853028297424, 0.7943634390830994, -0.8811311721801758, -0.3207264542579651, -0.4537496864795685, -0.7112511396408081, 0.2049063891172409, 0.30042460560798645, 0.33363232016563416, 0.33594197034835815, 0.1704796999692917, 0.31263914704322815, 0.1512509435415268, -0.1676349937915802, 0.5125681757926941, 0.40825900435447693, -0.24584203958511353, -0.7395439743995667, 0.7078034281730652, 0.3337181508541107, 0.06799129396677017, 0.128475621342659, 0.1114705502986908, -0.5798395872116089, -0.37271231412887573, -0.4054223895072937, 0.2894386053085327, -0.6037277579307556, -0.35486555099487305, -0.420805960893631, -0.19515632092952728, -0.44193416833877563, -0.016498319804668427, -0.3302575647830963, -0.36784157156944275, -0.4351228177547455, -0.2196621149778366, 0.6781054139137268, 0.5725616812705994, -0.44102996587753296, 0.39388182759284973, -0.8363536596298218, 0.09539075195789337, -0.17711059749126434, 0.374994695186615, -0.04191834479570389, -0.5443599224090576, -0.49512332677841187, 0.26616811752319336, -0.5106514692306519, -0.9134243130683899, 0.6241057515144348, 0.022254180163145065, 0.6603911519050598, 0.15478606522083282, 0.24251717329025269, 0.8168637156486511, -0.23068532347679138, 1.028896450996399, 0.10516670346260071, -0.6866587400436401, 0.7967604398727417, -0.31469258666038513, 0.1794872134923935, 0.44843631982803345, 0.1738467663526535, -0.3484772741794586, -0.282624751329422, -0.8109339475631714, -1.0267949104309082, 0.9954483509063721, 0.6266260147094727, -0.34007570147514343, 0.08354019373655319, 0.28624871373176575, -0.02923198789358139, 0.19433443248271942, -0.6794207692146301, -0.6107616424560547, -0.28987956047058105, -0.3386279344558716, -0.08795319497585297, -0.0214705690741539, -0.44016486406326294, -0.48750045895576477, 0.8697373270988464, -0.002165593206882477, 0.43905383348464966, 0.14954258501529694, -0.09437327086925507, -0.1944471299648285, 0.31359630823135376, 0.44571778178215027, 0.7987080812454224, -0.44788745045661926, -0.1311643421649933, 0.26728948950767517, -0.5920287370681763, 0.1501241773366928, 0.22127486765384674, -0.07505617290735245, -0.021829746663570404, 0.6514822840690613, 0.9607861042022705, 0.13053157925605774, -0.4761645495891571, 0.44965770840644836, 0.22280117869377136, -0.39231371879577637, -0.41979995369911194, 0.058047108352184296, 0.030357901006937027, 0.3662748336791992, 0.47445812821388245, 
-0.14882268011569977, 0.02607077546417713, -0.4185470640659332, 0.16903379559516907, 0.26699402928352356, -0.006522210780531168, -0.3554408848285675, 0.5008341670036316, -0.11062009632587433, -0.539817750453949, 0.7391352653503418, -0.123287633061409, -0.6003393530845642, 1.1768659353256226, 0.4410610496997833, 0.8043646812438965, -0.13948021829128265, 0.09996795654296875, 0.688242495059967, 0.30477428436279297, -0.05874020606279373, 0.5386654138565063, -0.08800105005502701, -0.5339601039886475, -0.2851547598838806, -0.9028391242027283, -0.17790871858596802, 0.35577917098999023, -1.0808563232421875, 0.2540867328643799, -0.09453330934047699, -0.3542858064174652, -0.144453763961792, 0.3746056854724884, -0.7481561303138733, 0.17207986116409302, -0.04164326190948486, 0.9043130874633789, -1.0213496685028076, 0.5705366134643555, 0.9252665638923645, -0.5515449643135071, -0.9561770558357239, -0.37075191736221313, -0.06150902435183525, -0.8871236443519592, 0.5987817645072937, 0.3171597421169281, 0.45764660835266113, -0.18149420619010925, -0.778174638748169, -0.9982361197471619, 1.4768797159194946, 0.0709415078163147, -0.6247126460075378, 0.15140216052532196, 0.22366885840892792, 0.24789680540561676, -0.3097603917121887, 0.47801220417022705, 0.7336745858192444, 0.7932988405227661, -0.034130264073610306, -0.9603398442268372, 0.2928864657878876, -0.45294317603111267, -0.11061610281467438, 0.26663655042648315, -0.9220354557037354, 0.8498974442481995, -0.20182907581329346, 0.02991844154894352, 0.07211928069591522, 0.30712878704071045, 0.6739771962165833, 0.46223074197769165, 0.5058746933937073, 0.7370100617408752, 0.6705890893936157, -0.33781278133392334, 0.9435869455337524, -0.33322420716285706, 0.9031549096107483, 1.0028793811798096, -0.000482583767734468, 0.8059361577033997, 0.42053908109664917, -0.5680733323097229, 0.5303757190704346, 1.0803391933441162, -0.2886568009853363, 0.572144091129303, 0.12369835376739502, -0.10120277106761932, -0.18007512390613556, 0.036506351083517075, -0.501681387424469, 0.36930787563323975, 0.2175464928150177, -0.45302414894104004, -0.18125706911087036, -0.18069535493850708, 0.05685291811823845, -0.31796029210090637, -0.3698047697544098, 0.6469170451164246, 0.05725336819887161, -0.5083218812942505, 0.8713471293449402, -0.2976720333099365, 0.6491252183914185, -0.6720044612884521, -0.1683632731437683, -0.31997066736221313, 0.2141060084104538, -0.549329400062561, -0.9822269678115845, 0.25388509035110474, 0.04164843633770943, -0.27134910225868225, 0.0025157169438898563, 0.6400037407875061, -0.28126397728919983, -0.6575875878334045, 0.46112924814224243, 0.2823365330696106, 0.3444819450378418, 0.09657274186611176, -0.9089595079421997, 0.3294735550880432, 0.1454600691795349, -0.7634891271591187, 0.42298057675361633, 0.34342750906944275, 0.044834427535533905, 0.6594516634941101, 0.6912347674369812, 0.12323777377605438, 0.22749194502830505, -0.13872432708740234, 1.0454492568969727, -0.7313354015350342, -0.42149055004119873, -0.7835460901260376, 0.843248724937439, -0.3104405403137207, -0.5051332116127014, 0.8014126420021057, 0.9961318969726562, 0.7378054857254028, 0.0396694540977478, 0.879654586315155, -0.46276646852493286, 0.49613115191459656, -0.3188185393810272, 0.873954176902771, -0.8372539281845093, 0.16684980690479279, -0.1734355390071869, -0.8179259300231934, 0.07923727482557297, 0.6860660910606384, -0.23541410267353058, -0.03238959237933159, 0.5423247814178467, 1.0536062717437744, 0.033084455877542496, 0.13377606868743896, 0.007522809784859419, 0.5111066102981567, 
0.21844200789928436, 0.5738171935081482, 0.7259736657142639, -0.6890001893043518, 0.5602561235427856, -0.6731806397438049, -0.41722676157951355, -0.2548465430736542, -0.715006411075592, -0.7825918793678284, -0.5252166390419006, -0.34736430644989014, -0.49791136384010315, 0.04686708003282547, 0.9796164035797119, 0.6199631690979004, -0.928377628326416, -0.436158150434494, -0.05706555396318436, 0.1139213964343071, -0.27833953499794006, -0.3635873794555664, 0.5957071185112, -0.18408533930778503, -0.6382222175598145, 0.26158416271209717, -0.12253844738006592, -0.17973846197128296, 0.10641278326511383, -0.31904780864715576, -0.4145912528038025, -0.2311343401670456, 0.6112774014472961, 0.28293099999427795, -0.7560229897499084, -0.3164069354534149, -0.10207892209291458, 0.03394652158021927, 0.2917358875274658, 0.17537309229373932, -0.5032024383544922, 0.12099415063858032, 0.44705623388290405, 0.344025582075119, 0.7334379553794861, 0.10626203566789627, 0.14592765271663666, -0.5983231663703918, -0.027786409482359886, -0.062177833169698715, 0.48921310901641846, 0.2519264817237854, -0.4815700054168701, 0.9311637282371521, 0.4051951467990875, -0.7967488169670105, -1.0527410507202148, -0.30667993426322937, -1.2550880908966064, -0.012973769567906857, 1.4328501224517822, -0.30628064274787903, -0.41773611307144165, 0.02688256837427616, -0.2847283184528351, 0.42396771907806396, -0.7324704527854919, 0.5391523241996765, 0.8262882232666016, -0.3316420614719391, -0.08263206481933594, -0.6234832406044006, 0.23924502730369568, 0.044394031167030334, -0.9239543676376343, 0.002283666282892227, 0.3357805609703064, 0.5131121277809143, 0.25974249839782715, 0.6215946674346924, 0.005097259301692247, -0.0963250920176506, 0.09330956637859344, 0.19725649058818817, -0.3157893419265747, 0.02856709249317646, -0.12764538824558258, 0.1212591677904129, -0.38253360986709595, -0.5593144297599792 ]
open-llm-leaderboard/details_dvruette__llama-13b-pretrained-sft-do2
open-llm-leaderboard
2023-10-21T22:43:26Z
200
0
[ "region:us" ]
null
2023-08-18T11:36:01Z
--- pretty_name: Evaluation run of dvruette/llama-13b-pretrained-sft-do2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [dvruette/llama-13b-pretrained-sft-do2](https://huggingface.co/dvruette/llama-13b-pretrained-sft-do2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dvruette__llama-13b-pretrained-sft-do2\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T22:43:14.661061](https://huggingface.co/datasets/open-llm-leaderboard/details_dvruette__llama-13b-pretrained-sft-do2/blob/main/results_2023-10-21T22-43-14.661061.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2528313758389262,\n\ \ \"em_stderr\": 0.004451070247505258,\n \"f1\": 0.3196245805369137,\n\ \ \"f1_stderr\": 0.004416910326006887,\n \"acc\": 0.42391092962847055,\n\ \ \"acc_stderr\": 0.010031261264359954\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.2528313758389262,\n \"em_stderr\": 0.004451070247505258,\n\ \ \"f1\": 0.3196245805369137,\n \"f1_stderr\": 0.004416910326006887\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09249431387414708,\n \ \ \"acc_stderr\": 0.007980396874560168\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.755327545382794,\n \"acc_stderr\": 0.012082125654159738\n\ \ }\n}\n```" repo_url: https://huggingface.co/dvruette/llama-13b-pretrained-sft-do2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|arc:challenge|25_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T18:57:06.342295.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T22_43_14.661061 path: - '**/details_harness|drop|3_2023-10-21T22-43-14.661061.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T22-43-14.661061.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T22_43_14.661061 path: - '**/details_harness|gsm8k|5_2023-10-21T22-43-14.661061.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T22-43-14.661061.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hellaswag|10_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_5 data_files: - 
split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:57:06.342295.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:57:06.342295.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:57:06.342295.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:57:06.342295.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T18_57_06.342295 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:57:06.342295.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:57:06.342295.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T18_57_06.342295 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:57:06.342295.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:57:06.342295.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T22_43_14.661061 path: - '**/details_harness|winogrande|5_2023-10-21T22-43-14.661061.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T22-43-14.661061.parquet' - config_name: results data_files: - split: 2023_07_19T18_57_06.342295 path: - results_2023-07-19T18:57:06.342295.parquet - split: 2023_10_21T22_43_14.661061 path: - results_2023-10-21T22-43-14.661061.parquet - split: latest path: - results_2023-10-21T22-43-14.661061.parquet --- # Dataset Card for Evaluation run of dvruette/llama-13b-pretrained-sft-do2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/dvruette/llama-13b-pretrained-sft-do2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [dvruette/llama-13b-pretrained-sft-do2](https://huggingface.co/dvruette/llama-13b-pretrained-sft-do2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dvruette__llama-13b-pretrained-sft-do2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T22:43:14.661061](https://huggingface.co/datasets/open-llm-leaderboard/details_dvruette__llama-13b-pretrained-sft-do2/blob/main/results_2023-10-21T22-43-14.661061.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each of them in the results and the "latest" split for each eval): ```python { "all": { "em": 0.2528313758389262, "em_stderr": 0.004451070247505258, "f1": 0.3196245805369137, "f1_stderr": 0.004416910326006887, "acc": 0.42391092962847055, "acc_stderr": 0.010031261264359954 }, "harness|drop|3": { "em": 0.2528313758389262, "em_stderr": 0.004451070247505258, "f1": 0.3196245805369137, "f1_stderr": 0.004416910326006887 }, "harness|gsm8k|5": { "acc": 0.09249431387414708, "acc_stderr": 0.007980396874560168 }, "harness|winogrande|5": { "acc": 0.755327545382794, "acc_stderr": 0.012082125654159738 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
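As an editorial aside (not part of the auto-generated card above), the timestamped splits mentioned in the Dataset Summary can be requested directly instead of the default "train"/"latest" pointers. This is a minimal sketch that assumes only the standard `datasets` API; the repo id, config name, and split name are taken verbatim from the configs listed in this card:

```python
from datasets import load_dataset

# Pin the evaluation details to one specific run by naming its timestamped
# split; "harness_winogrande_5" and the split below come from this card's
# config list (the 2023-10-21 run).
winogrande_run = load_dataset(
    "open-llm-leaderboard/details_dvruette__llama-13b-pretrained-sft-do2",
    "harness_winogrande_5",
    split="2023_10_21T22_43_14.661061",
)
print(winogrande_run)  # shows the columns and row count of that run's details
```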
[ -0.32445549964904785, -0.6663922667503357, 0.2594689428806305, 0.375389039516449, -0.23570452630519867, 0.24240337312221527, -0.32007986307144165, -0.10543923825025558, 0.3578101098537445, 0.4950196444988251, -0.7711313366889954, -0.801598072052002, -0.750652015209198, 0.1935279667377472, -0.18921706080436707, 1.1299207210540771, -0.39086949825286865, -0.1477324366569519, 0.11257089674472809, -0.1788758784532547, -0.4301069974899292, -0.4424940049648285, -0.43246421217918396, -0.4307763874530792, 0.32276561856269836, 0.6606919765472412, 0.23037783801555634, 0.6186026930809021, 0.7862908840179443, 0.36248981952667236, -0.10168229043483734, 0.16408908367156982, -0.55283123254776, -0.18344120681285858, 0.30762526392936707, -0.6357802152633667, -0.7628524303436279, 0.19699008762836456, 0.6866271495819092, 0.3642111122608185, -0.17340891063213348, 0.5538057684898376, 0.05974820256233215, 0.613430380821228, -0.34115588665008545, 0.33086761832237244, -0.3605157732963562, -0.03261076286435127, -0.33881235122680664, -0.26428231596946716, 0.030873797833919525, -0.3406839072704315, -0.20206332206726074, -0.5213152170181274, 0.17293767631053925, 0.027555353939533234, 0.9950467944145203, 0.3921736776828766, -0.21915560960769653, -0.1463238000869751, -0.3420122563838959, 0.8144634962081909, -0.7743037939071655, 0.035184115171432495, 0.7407156229019165, 0.11217039823532104, -0.2732796370983124, -0.6554701328277588, -0.44170019030570984, -0.26021718978881836, -0.23295824229717255, 0.21677693724632263, -0.05793837085366249, -0.14036375284194946, 0.46610379219055176, 0.6909580230712891, -0.6954988837242126, 0.11599945276975632, -0.5620209574699402, -0.0449565164744854, 1.035322904586792, 0.27728021144866943, 0.03731965273618698, -0.3622303605079651, -0.5465503334999084, -0.3428557515144348, -0.5184321999549866, 0.1943352371454239, 0.4188198745250702, 0.3428876996040344, -0.6395041346549988, 0.730938732624054, -0.43041688203811646, 0.4975211024284363, -0.0004021253844257444, -0.20178306102752686, 0.9205194115638733, -0.5846924185752869, -0.21586987376213074, 0.021438665688037872, 1.074547529220581, 0.36329978704452515, 0.034196484833955765, 0.1484750509262085, -0.31459370255470276, -0.13208717107772827, 0.0902184471487999, -0.7787523865699768, -0.19512401521205902, 0.31287863850593567, -0.5219089984893799, -0.49540072679519653, 0.30050039291381836, -0.7525335550308228, -0.03646551072597504, -0.3358619809150696, 0.29569172859191895, -0.16415190696716309, -0.4329088032245636, -0.05992424860596657, -0.1194281205534935, 0.230631485581398, 0.16243724524974823, -0.558889627456665, 0.3858641982078552, 0.5400179028511047, 0.9645015597343445, -0.11370343714952469, -0.4102703332901001, -0.34865787625312805, -0.30473434925079346, -0.19352835416793823, 0.41034337878227234, -0.11632735282182693, -0.3806648850440979, -0.332369863986969, 0.32281360030174255, -0.3081689476966858, -0.5661638975143433, 0.5850785970687866, -0.24508564174175262, 0.05317315086722374, -0.37746721506118774, -0.32628485560417175, -0.0994059219956398, 0.3616437613964081, -0.6124973893165588, 1.655887246131897, 0.39441636204719543, -0.8742557764053345, 0.12054673582315445, -0.8014745116233826, -0.21931391954421997, 0.02773333713412285, -0.005021632183343172, -0.4659673869609833, -0.14586222171783447, 0.13505573570728302, 0.6021827459335327, -0.33320361375808716, 0.13123612105846405, -0.1592823565006256, -0.5237032771110535, 0.16345931589603424, -0.1572415679693222, 1.0204861164093018, 0.17322370409965515, -0.5359378457069397, 0.09957026690244675, 
-1.057420015335083, 0.029571792110800743, 0.40134721994400024, -0.5656632781028748, -0.15925927460193634, -0.23181650042533875, 0.11330721527338028, 0.21930640935897827, 0.5804601907730103, -0.6121766567230225, 0.37183451652526855, -0.13751305639743805, 0.4276634752750397, 1.0122904777526855, -0.09656962752342224, 0.1649201661348343, -0.3909755349159241, 0.608359694480896, 0.1273324340581894, 0.22515325248241425, 0.1603427529335022, -0.44555002450942993, -0.7566627264022827, -0.3014427423477173, 0.11367839574813843, 0.7902827858924866, -0.5120741128921509, 0.8363029360771179, -0.41037482023239136, -0.7691249847412109, -0.630338191986084, 0.13498978316783905, 0.4161434471607208, 0.5623533725738525, 0.4343690872192383, -0.27860182523727417, -0.7421038150787354, -0.9413174986839294, 0.1750929057598114, -0.24052128195762634, 0.09671003371477127, 0.43670564889907837, 1.0753957033157349, -0.3467828631401062, 0.6963384747505188, -0.7667131423950195, -0.27282097935676575, -0.2696164548397064, -0.02345675602555275, 0.6376311182975769, 0.5718435049057007, 0.5712816715240479, -0.6858449578285217, -0.32273638248443604, -0.053951162844896317, -0.7372705936431885, -0.21822898089885712, -0.08671440929174423, -0.25991782546043396, 0.11945444345474243, 0.025426583364605904, -0.4178032875061035, 0.5775023102760315, 0.4430832266807556, -0.41845259070396423, 0.6398017406463623, -0.03260724991559982, 0.4012308120727539, -1.1456233263015747, 0.1667838841676712, 0.015868235379457474, -0.017761554569005966, -0.3571108877658844, -0.25089430809020996, -0.027192967012524605, 0.36961662769317627, -0.38422608375549316, 0.561474084854126, -0.287337064743042, -0.14246520400047302, 0.029869461432099342, 0.03416579216718674, -0.14198516309261322, 0.5893927216529846, -0.10124757885932922, 0.9010691046714783, 0.45022451877593994, -0.335336297750473, 0.3975469768047333, 0.577340841293335, -0.5266382098197937, 0.18086466193199158, -0.5949743390083313, 0.046661823987960815, 0.1731049120426178, 0.34221401810646057, -0.9793412089347839, -0.35724571347236633, 0.4492476284503937, -0.5167673230171204, 0.161137655377388, -0.19756750762462616, -0.5633804798126221, -0.575041651725769, -0.4962560832500458, 0.28434908390045166, 0.4555664658546448, -0.547375500202179, 0.016566386446356773, 0.3180164694786072, 0.021572893485426903, -0.7493962645530701, -0.8635469675064087, -0.15821373462677002, -0.33444085717201233, -0.6051291823387146, 0.46559470891952515, -0.18985393643379211, -0.18558059632778168, -0.0836481973528862, -0.15616121888160706, -0.2436872124671936, 0.2371794581413269, 0.33059602975845337, 0.4899072051048279, -0.1861186921596527, -0.307812362909317, -0.18723440170288086, -0.1456177681684494, 0.08538748323917389, 0.12238788604736328, 0.5448090434074402, -0.33604705333709717, -0.22166813910007477, -0.296897828578949, 0.14144352078437805, 0.37518569827079773, -0.21854038536548615, 0.8473774790763855, 0.7345889210700989, -0.19816160202026367, -0.02543495036661625, -0.4658471345901489, -0.006737707648426294, -0.49418845772743225, 0.37215232849121094, -0.28345000743865967, -0.916458010673523, 0.7346058487892151, 0.1617916077375412, 0.034454818814992905, 0.6882631182670593, 0.631489634513855, 0.0145416846498847, 0.6558655500411987, 0.2870790362358093, -0.023311583325266838, 0.6454265117645264, -0.8249524235725403, -0.011507326737046242, -1.0680748224258423, -0.416202187538147, -0.4385944902896881, -0.4220551550388336, -0.738770067691803, -0.4123261868953705, 0.1542462706565857, 0.28551363945007324, -0.5445025563240051, 
0.6251267194747925, -0.7188993692398071, 0.2743176817893982, 0.6398317217826843, 0.2710566520690918, 0.029799392446875572, -0.0809641033411026, -0.06081050634384155, 0.20314207673072815, -0.4945380985736847, -0.48842015862464905, 1.3843259811401367, 0.25526005029678345, 0.6132542490959167, -0.10015054792165756, 1.0402189493179321, 0.25940966606140137, 0.2401418834924698, -0.5206637382507324, 0.5979429483413696, -0.0322253443300724, -0.4929559528827667, -0.07815470546483994, -0.582501232624054, -0.8209720849990845, 0.16608300805091858, 0.02931791916489601, -0.902992308139801, 0.09164607524871826, 0.029614346101880074, -0.06658865511417389, 0.25501561164855957, -0.5367544889450073, 0.8347678184509277, -0.4237369894981384, -0.33548980951309204, 0.020730862393975258, -0.9125951528549194, 0.4464050829410553, -0.08106812089681625, 0.22811876237392426, -0.2811487317085266, 0.04968031495809555, 1.182542324066162, -0.6958879828453064, 0.7710937261581421, -0.19124095141887665, 0.08135427534580231, 0.41468846797943115, -0.4497443437576294, 0.56592857837677, -0.018606780096888542, -0.24396702647209167, 0.503643810749054, -0.11347854137420654, -0.2727240025997162, -0.214201420545578, 0.8246331810951233, -0.9797183275222778, -0.2966368496417999, -0.5018786787986755, -0.46144431829452515, 0.23946715891361237, 0.21759392321109772, 0.40803131461143494, 0.1782824546098709, -0.054523445665836334, 0.22613313794136047, 0.2580876350402832, -0.13771483302116394, 0.48875343799591064, 0.4152259826660156, -0.02542746067047119, -0.6530353426933289, 0.7206035256385803, 0.28297290205955505, 0.0258187223225832, 0.30147072672843933, 0.0288890041410923, -0.526506245136261, -0.48292580246925354, -0.3542101979255676, 0.24017900228500366, -0.5083507895469666, -0.3684360086917877, -0.3899882733821869, -0.28962773084640503, -0.33384349942207336, -0.062453124672174454, -0.4396737217903137, -0.5400068759918213, -0.49233338236808777, -0.40506061911582947, 0.7366846799850464, 0.6661410927772522, -0.4484100341796875, 0.5270909070968628, -0.7250868082046509, 0.2453794777393341, -0.2184218168258667, 0.4140594005584717, -0.10223223268985748, -0.5401690006256104, -0.38984230160713196, 0.19190989434719086, -0.4786706864833832, -0.8466835618019104, 0.4766811430454254, -0.059192828834056854, 0.6826841831207275, 0.07495757937431335, 0.0948043093085289, 0.7907822728157043, -0.29181885719299316, 1.0293651819229126, 0.08886564522981644, -0.6755785346031189, 0.8655201196670532, -0.28059014678001404, -0.08689141273498535, 0.6022775769233704, 0.27534669637680054, -0.36885547637939453, -0.10013245046138763, -0.9085853695869446, -1.1398292779922485, 1.1660348176956177, 0.5620715618133545, -0.3458417057991028, 0.02278863824903965, 0.37135374546051025, -0.03001434914767742, 0.18480131030082703, -0.754672110080719, -0.6394960284233093, -0.23885276913642883, -0.30214378237724304, -0.10989894717931747, -0.07570207864046097, -0.3853699266910553, -0.4933054447174072, 0.844274640083313, 0.08599414676427841, 0.49257731437683105, 0.21513274312019348, -0.10881030559539795, 0.0026195545215159655, 0.2020125538110733, 0.48036468029022217, 0.6892511248588562, -0.42545148730278015, -0.12703266739845276, 0.32052311301231384, -0.7413750290870667, 0.26650485396385193, 0.33445677161216736, 0.06714148819446564, -0.22634978592395782, 0.6410594582557678, 1.0340138673782349, 0.09022924304008484, -0.40066638588905334, 0.6019809246063232, 0.029090292751789093, -0.3982771933078766, -0.5110347867012024, 0.1311991810798645, -0.1417653113603592, 0.358848512172699, 
0.4254663586616516, -0.05787697434425354, -0.13340087234973907, -0.1609397828578949, 0.29633069038391113, 0.12312392145395279, -0.12085382640361786, -0.3532865047454834, 0.6375331878662109, 0.02829168364405632, -0.33362314105033875, 0.7839885354042053, -0.049549445509910583, -0.6441229581832886, 1.0821616649627686, 0.36521345376968384, 0.828177809715271, -0.13097824156284332, 0.08925752341747284, 0.5918124318122864, 0.39551734924316406, -0.27350369095802307, 0.6041674017906189, -0.0564892403781414, -0.5742780566215515, -0.30043911933898926, -0.8338829874992371, -0.04226817190647125, 0.349740594625473, -1.0620518922805786, 0.32992851734161377, -0.17173098027706146, -0.24930047988891602, -0.22845178842544556, 0.4922082722187042, -0.9141325950622559, 0.2550791800022125, -0.024214474484324455, 0.8858041167259216, -1.0754506587982178, 0.7076812386512756, 0.824040949344635, -0.5663968324661255, -0.9258506298065186, -0.26637133955955505, 0.032844576984643936, -0.9699952602386475, 0.6867328882217407, 0.32118019461631775, 0.29333439469337463, -0.09984609484672546, -0.5983933210372925, -1.0425931215286255, 1.5382853746414185, 0.20820215344429016, -0.6484994292259216, 0.15978097915649414, 0.17007113993167877, 0.3654376268386841, -0.23422285914421082, 0.5305421948432922, 0.8060509562492371, 0.6945751905441284, 0.034658726304769516, -0.9599102139472961, 0.3139896094799042, -0.42312467098236084, -0.09678007662296295, 0.23517270386219025, -0.997606098651886, 0.9627949595451355, -0.19948892295360565, 0.10488981753587723, -0.08914396911859512, 0.3746775984764099, 0.529424786567688, 0.22884541749954224, 0.43408435583114624, 0.754974901676178, 0.7316545248031616, -0.35453924536705017, 0.9497601389884949, -0.22505435347557068, 0.7691732048988342, 1.0349172353744507, 0.0410708412528038, 0.79059898853302, 0.3370283842086792, -0.54887855052948, 0.42187443375587463, 0.9713042378425598, -0.3602581322193146, 0.541509211063385, 0.21821796894073486, -0.046767786145210266, -0.08597376197576523, 0.20453695952892303, -0.5785800814628601, 0.49890995025634766, 0.1803761124610901, -0.512348473072052, -0.09582245349884033, -0.28863468766212463, 0.08341314643621445, -0.3506224751472473, -0.1622798591852188, 0.5721784234046936, 0.06763136386871338, -0.49199947714805603, 0.767498791217804, -0.12502485513687134, 0.7056965827941895, -0.6828376054763794, -0.11560255289077759, -0.20679880678653717, 0.268709272146225, -0.5403101444244385, -0.960896909236908, 0.23982436954975128, 0.20006869733333588, -0.23053662478923798, -0.2957024872303009, 0.49638572335243225, -0.24412907660007477, -0.5926178097724915, 0.4666345417499542, 0.3710542619228363, 0.37614181637763977, 0.14135059714317322, -0.9247266054153442, 0.23396162688732147, 0.3349132239818573, -0.800969660282135, 0.3872493803501129, 0.2680542767047882, 0.028490964323282242, 0.47954291105270386, 0.7290564775466919, 0.010142466053366661, 0.13574263453483582, -0.1517806500196457, 1.1628875732421875, -0.7436785101890564, -0.3835468888282776, -0.8286119103431702, 0.8805583119392395, -0.17063461244106293, -0.7022740840911865, 0.7487326860427856, 0.9067580699920654, 0.8830080628395081, 0.16320379078388214, 0.844308078289032, -0.45253705978393555, 0.42338505387306213, -0.37138721346855164, 0.8145840764045715, -0.7962906360626221, 0.32378506660461426, -0.2128477245569229, -0.8310365080833435, 0.024990063160657883, 0.8647838234901428, -0.2803954780101776, -0.008080887608230114, 0.5407317876815796, 0.9023764729499817, -0.014294717460870743, 0.1294170767068863, -0.1820332258939743, 
0.4272480309009552, 0.2919633388519287, 0.6288970112800598, 0.711818516254425, -0.7181544303894043, 0.4257153570652008, -0.6564137935638428, -0.5303994417190552, -0.20229700207710266, -0.7105448842048645, -0.7600029706954956, -0.6101752519607544, -0.35335269570350647, -0.4300414025783539, -0.04436686262488365, 0.9829955697059631, 0.5145357847213745, -0.7396601438522339, -0.36118999123573303, -0.06157316640019417, 0.06685740500688553, -0.24554991722106934, -0.2955535650253296, 0.616672694683075, -0.052412357181310654, -0.6366345286369324, 0.46710914373397827, -0.22129905223846436, -0.043499384075403214, 0.09477713704109192, -0.21976979076862335, -0.43815383315086365, -0.3402755558490753, 0.44606471061706543, 0.13520140945911407, -0.6700782775878906, -0.27658528089523315, -0.10983025282621384, -0.019758371636271477, 0.36425843834877014, 0.3127613663673401, -0.6080539226531982, -0.045149385929107666, 0.5315185189247131, 0.2404324859380722, 0.8306911587715149, 0.0717071071267128, 0.24678511917591095, -0.8187978267669678, 0.14429786801338196, 0.06059054285287857, 0.43419089913368225, 0.16486385464668274, -0.4550512135028839, 1.0752369165420532, 0.33426880836486816, -0.7453789114952087, -0.991588830947876, -0.22900082170963287, -1.2605483531951904, 0.05272737145423889, 1.3305740356445312, -0.3421453833580017, -0.2693423628807068, 0.19911283254623413, -0.2185245156288147, 0.3839014768600464, -0.5638009309768677, 0.45061638951301575, 0.7136589884757996, -0.3246549963951111, 0.11212380975484848, -0.6825537085533142, 0.3463718593120575, 0.010965840891003609, -0.9086384773254395, 0.006529616191983223, 0.2967262268066406, 0.5294449329376221, 0.05725052207708359, 0.837319016456604, 0.06784496456384659, -0.12745150923728943, 0.05296066030859947, 0.10791746526956558, -0.3287239074707031, -0.13135798275470734, -0.15729117393493652, -0.1044292077422142, -0.41274887323379517, -0.5583332180976868 ]
open-llm-leaderboard/details_dvruette__oasst-llama-13b-1000-steps
open-llm-leaderboard
2023-10-19T08:21:57Z
200
0
[ "region:us" ]
null
2023-08-18T11:37:22Z
--- pretty_name: Evaluation run of dvruette/oasst-llama-13b-1000-steps dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [dvruette/oasst-llama-13b-1000-steps](https://huggingface.co/dvruette/oasst-llama-13b-1000-steps)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dvruette__oasst-llama-13b-1000-steps\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T08:21:45.540153](https://huggingface.co/datasets/open-llm-leaderboard/details_dvruette__oasst-llama-13b-1000-steps/blob/main/results_2023-10-19T08-21-45.540153.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0959521812080537,\n\ \ \"em_stderr\": 0.0030162183550142383,\n \"f1\": 0.16973573825503283,\n\ \ \"f1_stderr\": 0.003251453767412336,\n \"acc\": 0.44401178094667637,\n\ \ \"acc_stderr\": 0.010227191296479903\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0959521812080537,\n \"em_stderr\": 0.0030162183550142383,\n\ \ \"f1\": 0.16973573825503283,\n \"f1_stderr\": 0.003251453767412336\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11296436694465505,\n \ \ \"acc_stderr\": 0.008719339028833073\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7750591949486977,\n \"acc_stderr\": 0.011735043564126735\n\ \ }\n}\n```" repo_url: https://huggingface.co/dvruette/oasst-llama-13b-1000-steps leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|arc:challenge|25_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T18:48:56.824224.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T08_21_45.540153 path: - '**/details_harness|drop|3_2023-10-19T08-21-45.540153.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T08-21-45.540153.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T08_21_45.540153 path: - '**/details_harness|gsm8k|5_2023-10-19T08-21-45.540153.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T08-21-45.540153.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hellaswag|10_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:48:56.824224.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:48:56.824224.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:48:56.824224.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:48:56.824224.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T18_48_56.824224 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:48:56.824224.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:48:56.824224.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T18_48_56.824224 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:48:56.824224.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:48:56.824224.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T08_21_45.540153 path: - '**/details_harness|winogrande|5_2023-10-19T08-21-45.540153.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T08-21-45.540153.parquet' - config_name: results data_files: - split: 2023_07_19T18_48_56.824224 path: - results_2023-07-19T18:48:56.824224.parquet - split: 2023_10_19T08_21_45.540153 path: - results_2023-10-19T08-21-45.540153.parquet - split: latest path: - results_2023-10-19T08-21-45.540153.parquet --- # Dataset Card for Evaluation run of dvruette/oasst-llama-13b-1000-steps ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/dvruette/oasst-llama-13b-1000-steps - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [dvruette/oasst-llama-13b-1000-steps](https://huggingface.co/dvruette/oasst-llama-13b-1000-steps) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dvruette__oasst-llama-13b-1000-steps", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T08:21:45.540153](https://huggingface.co/datasets/open-llm-leaderboard/details_dvruette__oasst-llama-13b-1000-steps/blob/main/results_2023-10-19T08-21-45.540153.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You can find each of them in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0959521812080537, "em_stderr": 0.0030162183550142383, "f1": 0.16973573825503283, "f1_stderr": 0.003251453767412336, "acc": 0.44401178094667637, "acc_stderr": 0.010227191296479903 }, "harness|drop|3": { "em": 0.0959521812080537, "em_stderr": 0.0030162183550142383, "f1": 0.16973573825503283, "f1_stderr": 0.003251453767412336 }, "harness|gsm8k|5": { "acc": 0.11296436694465505, "acc_stderr": 0.008719339028833073 }, "harness|winogrande|5": { "acc": 0.7750591949486977, "acc_stderr": 0.011735043564126735 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
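Again as an editorial sketch rather than part of the auto-generated card, the aggregated numbers shown in the "Latest results" JSON above are also exposed through the `results` config, whose `latest` split always tracks the most recent run (here the 2023-10-19 run). This assumes only the standard `datasets` API, with the repo and config names taken from this card:

```python
from datasets import load_dataset

# The "results" config stores the aggregated metrics per run; the "latest"
# split mirrors the JSON shown in the "Latest results" section above.
results = load_dataset(
    "open-llm-leaderboard/details_dvruette__oasst-llama-13b-1000-steps",
    "results",
    split="latest",
)
print(results[0])  # print the first row of the aggregated results table
```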
open-llm-leaderboard/details_chaoyi-wu__MedLLaMA_13B
open-llm-leaderboard
2023-08-27T12:35:43Z
200
0
[ "region:us" ]
null
2023-08-18T11:37:59Z
--- pretty_name: Evaluation run of chaoyi-wu/MedLLaMA_13B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [chaoyi-wu/MedLLaMA_13B](https://huggingface.co/chaoyi-wu/MedLLaMA_13B) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chaoyi-wu__MedLLaMA_13B\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-07-24T13:04:01.266274](https://huggingface.co/datasets/open-llm-leaderboard/details_chaoyi-wu__MedLLaMA_13B/blob/main/results_2023-07-24T13%3A04%3A01.266274.json)\ \ (note that their might be results for other tasks in the repos if successive evals\ \ didn't cover the same tasks. You find each in the results and the \"latest\" split\ \ for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.46685175478824187,\n\ \ \"acc_stderr\": 0.03531409019484935,\n \"acc_norm\": 0.47077526563025673,\n\ \ \"acc_norm_stderr\": 0.035299387024960424,\n \"mc1\": 0.2582619339045288,\n\ \ \"mc1_stderr\": 0.0153218216884762,\n \"mc2\": 0.4053787386286284,\n\ \ \"mc2_stderr\": 0.013893490031868357\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5102389078498294,\n \"acc_stderr\": 0.014608326906285012,\n\ \ \"acc_norm\": 0.5426621160409556,\n \"acc_norm_stderr\": 0.014558106543924065\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5862378012348137,\n\ \ \"acc_stderr\": 0.004915003499517829,\n \"acc_norm\": 0.7853017327225652,\n\ \ \"acc_norm_stderr\": 0.004097736838432052\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n\ \ \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.5259259259259259,\n\ \ \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.48026315789473684,\n \"acc_stderr\": 0.040657710025626036,\n\ \ \"acc_norm\": 0.48026315789473684,\n \"acc_norm_stderr\": 0.040657710025626036\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.44,\n\ \ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.49056603773584906,\n \"acc_stderr\": 0.0307673947078081,\n\ \ \"acc_norm\": 0.49056603773584906,\n \"acc_norm_stderr\": 0.0307673947078081\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4791666666666667,\n\ \ \"acc_stderr\": 0.041775789507399935,\n \"acc_norm\": 0.4791666666666667,\n\ \ \"acc_norm_stderr\": 0.041775789507399935\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\"\ : 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.42196531791907516,\n\ \ \"acc_stderr\": 0.03765746693865151,\n \"acc_norm\": 0.42196531791907516,\n\ \ \"acc_norm_stderr\": 0.03765746693865151\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237657,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237657\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n\ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.03202563076101737,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.03202563076101737\n },\n\ \ \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n\ \ \"acc_stderr\": 0.03947152782669415,\n \"acc_norm\": 0.22807017543859648,\n\ \ \"acc_norm_stderr\": 0.03947152782669415\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.3793103448275862,\n \"acc_stderr\": 0.04043461861916747,\n\ \ \"acc_norm\": 0.3793103448275862,\n \"acc_norm_stderr\": 0.04043461861916747\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.23809523809523808,\n \"acc_stderr\": 0.021935878081184766,\n \"\ acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.021935878081184766\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n\ \ \"acc_stderr\": 0.04216370213557835,\n \"acc_norm\": 0.3333333333333333,\n\ \ \"acc_norm_stderr\": 0.04216370213557835\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.5129032258064516,\n \"acc_stderr\": 0.028434533152681855,\n \"\ acc_norm\": 0.5129032258064516,\n \"acc_norm_stderr\": 0.028434533152681855\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.28078817733990147,\n \"acc_stderr\": 0.0316185633535861,\n \"\ acc_norm\": 0.28078817733990147,\n \"acc_norm_stderr\": 0.0316185633535861\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\"\ : 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.5757575757575758,\n \"acc_stderr\": 0.038592681420702636,\n\ \ \"acc_norm\": 0.5757575757575758,\n \"acc_norm_stderr\": 0.038592681420702636\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.5151515151515151,\n \"acc_stderr\": 0.03560716516531061,\n \"\ acc_norm\": 0.5151515151515151,\n \"acc_norm_stderr\": 0.03560716516531061\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 
0.6580310880829016,\n \"acc_stderr\": 0.03423465100104283,\n\ \ \"acc_norm\": 0.6580310880829016,\n \"acc_norm_stderr\": 0.03423465100104283\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.43846153846153846,\n \"acc_stderr\": 0.025158266016868575,\n\ \ \"acc_norm\": 0.43846153846153846,\n \"acc_norm_stderr\": 0.025158266016868575\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2962962962962963,\n \"acc_stderr\": 0.027840811495871927,\n \ \ \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.027840811495871927\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.44537815126050423,\n \"acc_stderr\": 0.0322841062671639,\n \ \ \"acc_norm\": 0.44537815126050423,\n \"acc_norm_stderr\": 0.0322841062671639\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"\ acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.5871559633027523,\n \"acc_stderr\": 0.021109128133413913,\n \"\ acc_norm\": 0.5871559633027523,\n \"acc_norm_stderr\": 0.021109128133413913\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.3472222222222222,\n \"acc_stderr\": 0.032468872436376486,\n \"\ acc_norm\": 0.3472222222222222,\n \"acc_norm_stderr\": 0.032468872436376486\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.5294117647058824,\n \"acc_stderr\": 0.03503235296367992,\n \"\ acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.03503235296367992\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.6244725738396625,\n \"acc_stderr\": 0.03152256243091156,\n \ \ \"acc_norm\": 0.6244725738396625,\n \"acc_norm_stderr\": 0.03152256243091156\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5291479820627802,\n\ \ \"acc_stderr\": 0.03350073248773404,\n \"acc_norm\": 0.5291479820627802,\n\ \ \"acc_norm_stderr\": 0.03350073248773404\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5343511450381679,\n \"acc_stderr\": 0.043749285605997376,\n\ \ \"acc_norm\": 0.5343511450381679,\n \"acc_norm_stderr\": 0.043749285605997376\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6528925619834711,\n \"acc_stderr\": 0.04345724570292534,\n \"\ acc_norm\": 0.6528925619834711,\n \"acc_norm_stderr\": 0.04345724570292534\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.49074074074074076,\n\ \ \"acc_stderr\": 0.04832853553437055,\n \"acc_norm\": 0.49074074074074076,\n\ \ \"acc_norm_stderr\": 0.04832853553437055\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.4294478527607362,\n \"acc_stderr\": 0.03889066619112722,\n\ \ \"acc_norm\": 0.4294478527607362,\n \"acc_norm_stderr\": 0.03889066619112722\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.36607142857142855,\n\ \ \"acc_stderr\": 0.045723723587374296,\n \"acc_norm\": 0.36607142857142855,\n\ \ \"acc_norm_stderr\": 0.045723723587374296\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.5922330097087378,\n \"acc_stderr\": 0.0486577757041077,\n\ \ \"acc_norm\": 0.5922330097087378,\n \"acc_norm_stderr\": 0.0486577757041077\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6495726495726496,\n\ \ \"acc_stderr\": 0.0312561082442188,\n \"acc_norm\": 0.6495726495726496,\n\ \ 
\"acc_norm_stderr\": 0.0312561082442188\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6206896551724138,\n\ \ \"acc_stderr\": 0.01735126811754445,\n \"acc_norm\": 0.6206896551724138,\n\ \ \"acc_norm_stderr\": 0.01735126811754445\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5028901734104047,\n \"acc_stderr\": 0.02691864538323901,\n\ \ \"acc_norm\": 0.5028901734104047,\n \"acc_norm_stderr\": 0.02691864538323901\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2558659217877095,\n\ \ \"acc_stderr\": 0.014593620923210756,\n \"acc_norm\": 0.2558659217877095,\n\ \ \"acc_norm_stderr\": 0.014593620923210756\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.545751633986928,\n \"acc_stderr\": 0.028509807802626592,\n\ \ \"acc_norm\": 0.545751633986928,\n \"acc_norm_stderr\": 0.028509807802626592\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.49517684887459806,\n\ \ \"acc_stderr\": 0.028396770444111298,\n \"acc_norm\": 0.49517684887459806,\n\ \ \"acc_norm_stderr\": 0.028396770444111298\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5030864197530864,\n \"acc_stderr\": 0.027820214158594377,\n\ \ \"acc_norm\": 0.5030864197530864,\n \"acc_norm_stderr\": 0.027820214158594377\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3546099290780142,\n \"acc_stderr\": 0.028538650028878638,\n \ \ \"acc_norm\": 0.3546099290780142,\n \"acc_norm_stderr\": 0.028538650028878638\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3324641460234681,\n\ \ \"acc_stderr\": 0.01203202233226051,\n \"acc_norm\": 0.3324641460234681,\n\ \ \"acc_norm_stderr\": 0.01203202233226051\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.03033257809455502,\n\ \ \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.03033257809455502\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.46895424836601307,\n \"acc_stderr\": 0.020188804456361887,\n \ \ \"acc_norm\": 0.46895424836601307,\n \"acc_norm_stderr\": 0.020188804456361887\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5636363636363636,\n\ \ \"acc_stderr\": 0.04750185058907296,\n \"acc_norm\": 0.5636363636363636,\n\ \ \"acc_norm_stderr\": 0.04750185058907296\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5387755102040817,\n \"acc_stderr\": 0.031912820526692774,\n\ \ \"acc_norm\": 0.5387755102040817,\n \"acc_norm_stderr\": 0.031912820526692774\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6318407960199005,\n\ \ \"acc_stderr\": 0.03410410565495302,\n \"acc_norm\": 0.6318407960199005,\n\ \ \"acc_norm_stderr\": 0.03410410565495302\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \ \ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42771084337349397,\n\ \ \"acc_stderr\": 0.038515976837185335,\n \"acc_norm\": 0.42771084337349397,\n\ \ \"acc_norm_stderr\": 0.038515976837185335\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.6549707602339181,\n \"acc_stderr\": 0.03645981377388806,\n\ \ \"acc_norm\": 0.6549707602339181,\n 
\"acc_norm_stderr\": 0.03645981377388806\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2582619339045288,\n\ \ \"mc1_stderr\": 0.0153218216884762,\n \"mc2\": 0.4053787386286284,\n\ \ \"mc2_stderr\": 0.013893490031868357\n }\n}\n```" repo_url: https://huggingface.co/chaoyi-wu/MedLLaMA_13B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|arc:challenge|25_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hellaswag|10_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:04:01.266274.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:04:01.266274.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:04:01.266274.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T13:04:01.266274.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:04:01.266274.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:04:01.266274.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T13_04_01.266274 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T13:04:01.266274.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T13:04:01.266274.parquet' - config_name: results data_files: - split: 2023_07_24T13_04_01.266274 path: - results_2023-07-24T13:04:01.266274.parquet - split: latest path: - results_2023-07-24T13:04:01.266274.parquet --- # Dataset Card for Evaluation run of chaoyi-wu/MedLLaMA_13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/chaoyi-wu/MedLLaMA_13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [chaoyi-wu/MedLLaMA_13B](https://huggingface.co/chaoyi-wu/MedLLaMA_13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
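Because every per-task configuration and the aggregated `results` configuration expose both a timestamped split and a `latest` split (see the configs listed above), each can be pulled individually. A minimal sketch, assuming only that the `datasets` library is installed; the repository, configuration, and split names are the ones declared in this card:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_chaoyi-wu__MedLLaMA_13B"

# Enumerate the per-task configurations plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# Load the aggregated results at the most recent evaluation timestamp.
results = load_dataset(repo, "results", split="latest")
print(results)
```

The timestamped split name (here `2023_07_24T13_04_01.266274`) can be passed instead of `latest` to pin a specific run.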
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_chaoyi-wu__MedLLaMA_13B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-24T13:04:01.266274](https://huggingface.co/datasets/open-llm-leaderboard/details_chaoyi-wu__MedLLaMA_13B/blob/main/results_2023-07-24T13%3A04%3A01.266274.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.46685175478824187, "acc_stderr": 0.03531409019484935, "acc_norm": 0.47077526563025673, "acc_norm_stderr": 0.035299387024960424, "mc1": 0.2582619339045288, "mc1_stderr": 0.0153218216884762, "mc2": 0.4053787386286284, "mc2_stderr": 0.013893490031868357 }, "harness|arc:challenge|25": { "acc": 0.5102389078498294, "acc_stderr": 0.014608326906285012, "acc_norm": 0.5426621160409556, "acc_norm_stderr": 0.014558106543924065 }, "harness|hellaswag|10": { "acc": 0.5862378012348137, "acc_stderr": 0.004915003499517829, "acc_norm": 0.7853017327225652, "acc_norm_stderr": 0.004097736838432052 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5259259259259259, "acc_stderr": 0.04313531696750575, "acc_norm": 0.5259259259259259, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.48026315789473684, "acc_stderr": 0.040657710025626036, "acc_norm": 0.48026315789473684, "acc_norm_stderr": 0.040657710025626036 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.49056603773584906, "acc_stderr": 0.0307673947078081, "acc_norm": 0.49056603773584906, "acc_norm_stderr": 0.0307673947078081 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4791666666666667, "acc_stderr": 0.041775789507399935, "acc_norm": 0.4791666666666667, "acc_norm_stderr": 0.041775789507399935 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.42196531791907516, "acc_stderr": 0.03765746693865151, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.03765746693865151 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237657, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237657 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4, "acc_stderr": 0.03202563076101737, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101737 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 
0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.3793103448275862, "acc_stderr": 0.04043461861916747, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.04043461861916747 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.23809523809523808, "acc_stderr": 0.021935878081184766, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.021935878081184766 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5129032258064516, "acc_stderr": 0.028434533152681855, "acc_norm": 0.5129032258064516, "acc_norm_stderr": 0.028434533152681855 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.28078817733990147, "acc_stderr": 0.0316185633535861, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.0316185633535861 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5757575757575758, "acc_stderr": 0.038592681420702636, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.038592681420702636 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03560716516531061, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03560716516531061 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6580310880829016, "acc_stderr": 0.03423465100104283, "acc_norm": 0.6580310880829016, "acc_norm_stderr": 0.03423465100104283 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.43846153846153846, "acc_stderr": 0.025158266016868575, "acc_norm": 0.43846153846153846, "acc_norm_stderr": 0.025158266016868575 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871927, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871927 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.44537815126050423, "acc_stderr": 0.0322841062671639, "acc_norm": 0.44537815126050423, "acc_norm_stderr": 0.0322841062671639 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5871559633027523, "acc_stderr": 0.021109128133413913, "acc_norm": 0.5871559633027523, "acc_norm_stderr": 0.021109128133413913 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3472222222222222, "acc_stderr": 0.032468872436376486, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.032468872436376486 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03503235296367992, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03503235296367992 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6244725738396625, "acc_stderr": 0.03152256243091156, "acc_norm": 0.6244725738396625, "acc_norm_stderr": 0.03152256243091156 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5291479820627802, "acc_stderr": 0.03350073248773404, "acc_norm": 
0.5291479820627802, "acc_norm_stderr": 0.03350073248773404 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5343511450381679, "acc_stderr": 0.043749285605997376, "acc_norm": 0.5343511450381679, "acc_norm_stderr": 0.043749285605997376 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.04345724570292534, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.04345724570292534 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.04832853553437055 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.03889066619112722, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.03889066619112722 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.36607142857142855, "acc_stderr": 0.045723723587374296, "acc_norm": 0.36607142857142855, "acc_norm_stderr": 0.045723723587374296 }, "harness|hendrycksTest-management|5": { "acc": 0.5922330097087378, "acc_stderr": 0.0486577757041077, "acc_norm": 0.5922330097087378, "acc_norm_stderr": 0.0486577757041077 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6495726495726496, "acc_stderr": 0.0312561082442188, "acc_norm": 0.6495726495726496, "acc_norm_stderr": 0.0312561082442188 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6206896551724138, "acc_stderr": 0.01735126811754445, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.01735126811754445 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5028901734104047, "acc_stderr": 0.02691864538323901, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.02691864538323901 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2558659217877095, "acc_stderr": 0.014593620923210756, "acc_norm": 0.2558659217877095, "acc_norm_stderr": 0.014593620923210756 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.545751633986928, "acc_stderr": 0.028509807802626592, "acc_norm": 0.545751633986928, "acc_norm_stderr": 0.028509807802626592 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.49517684887459806, "acc_stderr": 0.028396770444111298, "acc_norm": 0.49517684887459806, "acc_norm_stderr": 0.028396770444111298 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5030864197530864, "acc_stderr": 0.027820214158594377, "acc_norm": 0.5030864197530864, "acc_norm_stderr": 0.027820214158594377 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3546099290780142, "acc_stderr": 0.028538650028878638, "acc_norm": 0.3546099290780142, "acc_norm_stderr": 0.028538650028878638 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3324641460234681, "acc_stderr": 0.01203202233226051, "acc_norm": 0.3324641460234681, "acc_norm_stderr": 0.01203202233226051 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5257352941176471, "acc_stderr": 0.03033257809455502, "acc_norm": 0.5257352941176471, "acc_norm_stderr": 0.03033257809455502 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.46895424836601307, "acc_stderr": 0.020188804456361887, "acc_norm": 0.46895424836601307, "acc_norm_stderr": 0.020188804456361887 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5636363636363636, "acc_stderr": 0.04750185058907296, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.04750185058907296 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.5387755102040817, "acc_stderr": 0.031912820526692774, "acc_norm": 0.5387755102040817, "acc_norm_stderr": 0.031912820526692774 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.038515976837185335, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.038515976837185335 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6549707602339181, "acc_stderr": 0.03645981377388806, "acc_norm": 0.6549707602339181, "acc_norm_stderr": 0.03645981377388806 }, "harness|truthfulqa:mc|0": { "mc1": 0.2582619339045288, "mc1_stderr": 0.0153218216884762, "mc2": 0.4053787386286284, "mc2_stderr": 0.013893490031868357 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
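In addition to the per-task detail configurations above, the aggregated scores for each run are stored in the "results" configuration, whose "latest" split always resolves to the most recent evaluation. The snippet below is a minimal sketch of reading those aggregates; the exact column layout of the results parquet is not documented here, so it simply converts the split to a pandas DataFrame and inspects whatever columns are present:

```python
from datasets import load_dataset

# Aggregated metrics for this model; the "latest" split points to the
# most recent evaluation run recorded in this repository.
results = load_dataset(
    "open-llm-leaderboard/details_chaoyi-wu__MedLLaMA_13B",
    "results",
    split="latest",
)

# The column layout is whatever the evaluation harness wrote, so inspect it
# rather than assuming specific field names.
df = results.to_pandas()
print(df.columns.tolist())
print(df.head())
```

Reading the "results" configuration this way mirrors what the leaderboard itself relies on when computing and displaying the aggregated metrics.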
[ -0.714909017086029, -0.8547215461730957, 0.28475499153137207, 0.22367984056472778, -0.1679566651582718, -0.0823039636015892, 0.014141724444925785, -0.2209406942129135, 0.5886063575744629, -0.04453836753964424, -0.48932352662086487, -0.690283477306366, -0.4149101972579956, 0.24200430512428284, 0.0012076430721208453, 0.8253657817840576, -0.2065971940755844, -0.09371642023324966, 0.10463700443506241, -0.043191008269786835, -0.21912743151187897, -0.3323017358779907, -0.49465271830558777, -0.36219653487205505, 0.21187685430049896, 0.42797723412513733, 0.4241079092025757, 0.827135443687439, 0.6998441815376282, 0.29306161403656006, -0.297885924577713, 0.008101187646389008, -0.18007424473762512, -0.31834307312965393, 0.40897372364997864, -0.3397664725780487, -0.8737517595291138, 0.3388059437274933, 0.7626208066940308, 0.6575987339019775, -0.09832344949245453, 0.30103540420532227, 0.03846845030784607, 0.607177197933197, -0.33895406126976013, 0.030891217291355133, -0.2519824504852295, 0.2373584508895874, -0.1920677125453949, -0.29746848344802856, -0.30397072434425354, -0.2881467342376709, -0.12776030600070953, -0.8641494512557983, 0.23561006784439087, 0.29329216480255127, 1.5840510129928589, -0.1595570147037506, -0.25937989354133606, 0.1087365597486496, -0.11143943667411804, 1.0042952299118042, -0.8677436709403992, 0.35175463557243347, 0.7844772338867188, 0.1433056890964508, -0.17674677073955536, -0.6018132567405701, -0.610286295413971, 0.0655905082821846, -0.34659504890441895, 0.3803861737251282, -0.07166608422994614, -0.16872160136699677, 0.36508268117904663, 0.6880858540534973, -0.639170229434967, 0.17239554226398468, -0.6620339751243591, -0.15864495933055878, 1.053355097770691, 0.3563286066055298, 0.060130711644887924, -0.366545170545578, -0.7142664194107056, -0.6262621283531189, -0.4280303716659546, 0.28195783495903015, 0.42289796471595764, 0.3275798559188843, -0.4163080155849457, 0.6913739442825317, -0.39550915360450745, 0.5700227618217468, 0.43596431612968445, 0.015370561741292477, 0.8819236755371094, -0.6666660904884338, -0.562994122505188, -0.07460808753967285, 1.1207494735717773, 0.5939505696296692, 0.05225689709186554, 0.22984693944454193, 0.032448623329401016, -0.11755408346652985, 0.022281663492321968, -0.8749935626983643, -0.30707308650016785, 0.16761380434036255, -0.43266183137893677, -0.5250059962272644, 0.3534945845603943, -0.8949533104896545, 0.12346775829792023, -0.025523819029331207, 0.4000742435455322, -0.486483097076416, -0.10518249869346619, 0.24341511726379395, -0.3641993999481201, 0.805057942867279, -0.18386141955852509, -0.7834075689315796, 0.386584997177124, 0.490745484828949, 0.7606886625289917, -0.08347863703966141, -0.42756548523902893, -0.13845223188400269, -0.11511038988828659, -0.3087875247001648, 0.5238442420959473, -0.24864551424980164, -0.42537757754325867, -0.284849613904953, 0.29394200444221497, -0.2765900790691376, -0.35451486706733704, 0.7568385004997253, -0.20562638342380524, 0.21297435462474823, -0.4313141107559204, -0.6369563341140747, 0.1259329617023468, 0.3793904483318329, -0.4333958029747009, 1.2613078355789185, 0.2603047788143158, -0.8242536783218384, 0.412908673286438, -0.5985196232795715, -0.15251335501670837, -0.048517294228076935, -0.03753833472728729, -0.798316478729248, -0.275034636259079, 0.17415091395378113, 0.40824294090270996, -0.16089105606079102, -0.12355706840753555, -0.3887438178062439, -0.33767175674438477, 0.3907621204853058, -0.17799611389636993, 1.2375500202178955, -0.006577386055141687, -0.7391106486320496, -0.0993647649884224, 
-1.2656103372573853, 0.33573028445243835, 0.1977434605360031, -0.413414865732193, -0.17795568704605103, -0.4841499626636505, -0.0038420618511736393, 0.20368079841136932, 0.2927824854850769, -0.799606442451477, 0.3036593794822693, -0.3695686161518097, 0.17378602921962738, 1.2910271883010864, 0.014993060380220413, 0.1374599039554596, -0.5883963108062744, 0.5345388650894165, 0.22317680716514587, 0.1932954341173172, 0.3873850405216217, -0.5905824899673462, -0.8182429671287537, -0.50907963514328, -0.024545568972826004, 0.6086419224739075, -0.1836053878068924, 1.1491628885269165, 0.08417586982250214, -0.9098038673400879, -0.504404604434967, -0.13785842061042786, 0.49848923087120056, 0.8114878535270691, 0.5967410206794739, -0.027106057852506638, -0.6558336615562439, -1.1185745000839233, -0.26581209897994995, -0.17357654869556427, 0.13022054731845856, 0.23968620598316193, 1.0112725496292114, -0.2853991985321045, 0.563343346118927, -1.0506319999694824, -0.21805919706821442, 0.1804960072040558, -0.062248870730400085, 0.7885710597038269, 0.7416855692863464, 0.5880087614059448, -0.6453529000282288, -0.5156689882278442, 0.2009039968252182, -0.8867921829223633, -0.09910093992948532, 0.1287492960691452, -0.3208581507205963, 0.16122174263000488, 0.13080230355262756, -0.696297824382782, 0.5315670371055603, 0.24003157019615173, -1.0774978399276733, 1.0745222568511963, -0.3680728077888489, 0.5872457027435303, -1.0352003574371338, 0.19771380722522736, -0.039007049053907394, 0.05355869233608246, -0.5022571086883545, 0.029936760663986206, 0.11341220885515213, 0.45546644926071167, -0.45774760842323303, 0.7799909114837646, -0.6857278347015381, -0.05156742408871651, 0.45676878094673157, 0.115400530397892, -0.14126265048980713, 0.3752651810646057, -0.2467259168624878, 0.7865743637084961, 0.753379762172699, -0.48360559344291687, 0.5288198590278625, 0.377413809299469, -0.23156704008579254, 0.7206167578697205, -0.4862898588180542, -0.3049408495426178, 0.33018219470977783, -0.0677654817700386, -0.8091040849685669, -0.4719904363155365, 0.07006935775279999, -0.595767080783844, -0.11126372218132019, 0.39622074365615845, -0.27132588624954224, -0.8205081820487976, -0.9615104794502258, 0.316103458404541, 0.7078566551208496, -0.436885803937912, -0.18081803619861603, 0.06822000443935394, 0.13018637895584106, -0.8270431756973267, -0.8205437064170837, -0.5055966973304749, -0.230897456407547, -0.7162998914718628, 0.31437408924102783, -0.28509896993637085, -0.2881402373313904, -0.09904756397008896, -0.21588392555713654, -0.34885889291763306, 0.014671487733721733, 0.12399585545063019, 0.6821789145469666, -0.4296543002128601, -0.3143979012966156, -0.26607659459114075, -0.18821142613887787, 0.26214635372161865, -0.11035527288913727, 0.37083300948143005, -0.46191635727882385, -0.3983452320098877, -0.43128424882888794, -0.024008022621273994, 0.725978672504425, -0.08356671035289764, 0.7196685671806335, 0.42512452602386475, -0.3253535330295563, 0.0038003677036613226, -0.2771799862384796, -0.2691943943500519, -0.5812622308731079, 0.2553686797618866, -0.486773818731308, -1.0096222162246704, 0.8086054921150208, 0.5136845111846924, 0.060080792754888535, 1.1170591115951538, 0.624014675617218, -0.3089700937271118, 1.0229243040084839, 0.05041861534118652, 0.31363824009895325, 0.37392082810401917, -0.69329833984375, 0.11408568918704987, -0.929048478603363, -0.31719741225242615, -0.5732797384262085, -0.49210071563720703, -0.7286896109580994, -0.07964185625314713, 0.2904628813266754, 0.1409023553133011, -0.6597731113433838, 0.5479429364204407, 
-0.8304439783096313, 0.5358166098594666, 0.5792725682258606, 0.2991839051246643, 0.1646771878004074, -0.16194845736026764, -0.41637855768203735, -0.14140881597995758, -0.4570004642009735, -0.24164101481437683, 1.2193795442581177, 0.2796541154384613, 0.7539262175559998, 0.07676433026790619, 0.8817706108093262, 0.07440859824419022, -0.07722073048353195, -0.5718594193458557, 0.6394471526145935, 0.13688132166862488, -0.7563934922218323, -0.40848827362060547, -0.5148059129714966, -1.1134876012802124, 0.39007315039634705, -0.13834868371486664, -0.8353317379951477, 0.11264543980360031, 0.008734598755836487, -0.2045353204011917, 0.4974296987056732, -0.544911801815033, 0.8359077572822571, -0.13358813524246216, -0.4757594168186188, 0.08244620263576508, -0.8210134506225586, 0.47396016120910645, 0.19179709255695343, 0.2628538906574249, 0.06100628897547722, 0.25701314210891724, 1.1794800758361816, -0.8263648748397827, 0.42070484161376953, 0.09161785989999771, 0.021188165992498398, 0.2997320890426636, -0.1753307282924652, 0.4728465974330902, 0.06601378321647644, -0.00014172396913636476, -0.1267254650592804, 0.2828192412853241, -0.8630242943763733, -0.041452258825302124, 0.9279593825340271, -0.9536834359169006, -0.5985608696937561, -0.9041809439659119, -0.5448225736618042, 0.08664677292108536, 0.5818002223968506, 0.38103145360946655, 0.5407728552818298, -0.0031979396007955074, 0.44716569781303406, 0.8467976450920105, -0.1484536975622177, 0.5652457475662231, 0.2657727599143982, 0.0916040763258934, -0.6628896594047546, 0.8300284743309021, 0.10165157169103622, 0.3488891124725342, 0.2431201934814453, 0.40519458055496216, -0.5130061507225037, -0.2045750916004181, -0.21084073185920715, 0.5171689987182617, -0.615612268447876, -0.2671227753162384, -0.3482474982738495, -0.4008588194847107, -0.7565808892250061, -0.6361992359161377, -0.30004894733428955, -0.49272283911705017, -0.47098657488822937, -0.48992812633514404, 0.572900116443634, 0.4734519422054291, -0.4089369475841522, 0.003066630568355322, -0.48429441452026367, 0.26435479521751404, 0.3221375346183777, 0.5554684996604919, -0.38366493582725525, -0.5526167154312134, 0.0227255467325449, -0.11955206096172333, -0.5805175304412842, -0.9742748737335205, 0.33533695340156555, -0.07658392190933228, 0.5213249921798706, 0.6317113041877747, 0.08413282036781311, 0.8650790452957153, -0.21341191232204437, 1.02383553981781, 0.3288201093673706, -0.7730939984321594, 0.745389461517334, -0.3433898985385895, 0.15284885466098785, 0.6162274479866028, 0.16735614836215973, -0.19996802508831024, -0.697945237159729, -1.3199145793914795, -0.7801840901374817, 0.6552321910858154, 0.3908534348011017, -0.29075929522514343, 0.062383439391851425, 0.14320334792137146, -0.324843168258667, -0.19444918632507324, -0.6879701614379883, -0.9238006472587585, -0.12046707421541214, -0.5104453563690186, 0.12379628419876099, 0.01773056387901306, -0.413984477519989, -0.8568776249885559, 0.9331581592559814, 0.015926750376820564, 0.592253565788269, 0.48552560806274414, 0.07827001810073853, 0.032754477113485336, 0.47775882482528687, 0.9474315643310547, 0.7667987942695618, -0.4631150960922241, 0.42316269874572754, 0.39794036746025085, -1.0573581457138062, 0.4634837806224823, 0.36066630482673645, -0.08197113126516342, -0.027622442692518234, 0.4549313187599182, 0.3886334300041199, 0.08223266154527664, -0.21028374135494232, 0.590116024017334, -0.0029548618476837873, -0.563808023929596, -0.38078543543815613, 0.059628888964653015, -0.11124580353498459, -0.0001861408818513155, 0.39186373353004456, 
-0.15398919582366943, -0.026219800114631653, -0.49837836623191833, 0.48624417185783386, 0.3576012849807739, -0.4746249318122864, -0.15797995030879974, 0.7397521138191223, -0.2039392739534378, -0.13362617790699005, 0.3148946464061737, -0.1541578769683838, -0.6250593066215515, 1.1468896865844727, 0.5973964929580688, 0.6816344857215881, -0.2898561358451843, -0.0664760172367096, 0.9340434074401855, 0.3847992718219757, -0.024613643065094948, 0.5274662375450134, 0.3372247517108917, -0.23707541823387146, 0.20069611072540283, -0.8407796025276184, -0.03292518109083176, 0.1697918027639389, -0.8458260297775269, 0.33932241797447205, -0.513725996017456, -0.18834663927555084, 0.03660952299833298, 0.4293270409107208, -0.4216219484806061, 0.5283238887786865, -0.41575780510902405, 1.2195241451263428, -0.9451573491096497, 0.7459240555763245, 0.7669493556022644, -0.5515625476837158, -1.056588053703308, -0.5403633713722229, 0.010321303270757198, -0.8198801875114441, 0.5740298628807068, -0.03677806258201599, 0.17040090262889862, -0.08204544335603714, -0.7179723978042603, -0.873710572719574, 1.4033788442611694, -0.04239431023597717, -0.4124995768070221, 0.23117172718048096, -0.07117655873298645, 0.46242979168891907, 0.13653425872325897, 0.5549073815345764, 0.7585687041282654, 0.8103074431419373, -0.11651372909545898, -0.751309335231781, 0.3399543762207031, -0.5201923251152039, -0.349562406539917, 0.4576171636581421, -0.9142529964447021, 1.2428117990493774, -0.020493440330028534, 0.21060167253017426, -0.1523934006690979, 0.6349026560783386, 0.805097222328186, 0.3044986128807068, 0.3541299104690552, 0.9109293818473816, 0.8656061291694641, -0.5026825070381165, 1.008143424987793, -0.23847229778766632, 0.8839040994644165, 0.6693145632743835, 0.24590469896793365, 0.7575167417526245, 0.6779578924179077, -0.5595174431800842, 0.5584046840667725, 0.8165231943130493, -0.331522136926651, 0.40135595202445984, 0.2666315734386444, -0.12618617713451385, -0.11748873442411423, 0.4143374562263489, -0.8829432725906372, 0.10707537829875946, 0.06886439025402069, -0.3740358352661133, 0.07164616882801056, -0.4335258901119232, 0.3390346169471741, -0.054425857961177826, -0.04430031403899193, 0.35790786147117615, 0.05588769540190697, -0.43377038836479187, 0.9487165212631226, -0.12328033894300461, 0.7422763109207153, -0.5329314470291138, -0.09094859659671783, -0.37785008549690247, 0.6356786489486694, -0.4438863694667816, -1.039361596107483, 0.12757763266563416, 0.07335477322340012, -0.12351352721452713, -0.1680901050567627, 0.6969432830810547, -0.21631459891796112, -0.7763100862503052, 0.13768962025642395, 0.06343846768140793, 0.09149310737848282, 0.550914466381073, -0.6771050691604614, -0.3718774616718292, -0.04809480533003807, -0.5398738384246826, 0.11821122467517853, 0.3276572525501251, 0.2969929575920105, 0.5611812472343445, 0.6428335905075073, 0.18536332249641418, 0.40137019753456116, -0.5556888580322266, 0.8081265091896057, -1.0533068180084229, -0.7476670145988464, -0.9402781128883362, 0.43325328826904297, -0.31578925251960754, -0.8716604113578796, 1.009272813796997, 1.0544649362564087, 0.866412878036499, -0.005736834369599819, 0.6198403835296631, -0.3675990700721741, 0.25370973348617554, -0.4082694351673126, 0.9722017645835876, -0.8417947888374329, -0.2262694090604782, -0.27763569355010986, -0.6749694347381592, -0.3576492667198181, 0.8791089653968811, -0.14423386752605438, 0.024846041575074196, 1.0637024641036987, 0.6580798625946045, -0.0898304134607315, 0.027756785973906517, -0.052139099687337875, 0.5936377644538879, 
0.37393227219581604, 1.0320030450820923, 0.6505182981491089, -0.7838829159736633, 0.3263046443462372, -0.5231932997703552, -0.4193836450576782, -0.40152546763420105, -0.43940630555152893, -0.8794858455657959, -0.5120660662651062, -0.20462694764137268, -0.6490301489830017, -0.15285882353782654, 1.0212621688842773, 0.4520750343799591, -0.9188032746315002, -0.4281092584133148, -0.08081922680139542, 0.16985060274600983, -0.6132648587226868, -0.41891181468963623, 0.7416631579399109, -0.09951729327440262, -0.557536244392395, 0.1726331114768982, -0.13199234008789062, 0.22384332120418549, 0.09268556535243988, -0.41814517974853516, -0.7291257977485657, 0.011868652887642384, 0.42197877168655396, 0.3323770761489868, -0.7002836465835571, -0.7008931636810303, 0.32170233130455017, -0.5641151070594788, 0.4510742723941803, 0.001423823181539774, -0.5109370350837708, 0.06309810280799866, 0.6706575155258179, 0.4712616205215454, 0.6415949463844299, -0.04760702699422836, 0.11079423874616623, -0.6237200498580933, 0.17446474730968475, -0.015488269738852978, 0.2982241213321686, -0.06440024077892303, -0.32361239194869995, 0.800947368144989, 0.6657890677452087, -0.5381688475608826, -1.0898268222808838, -0.4389532506465912, -1.46247398853302, -0.023179104551672935, 1.0863311290740967, 0.010534124448895454, -0.4820750653743744, 0.2462678849697113, -0.1515735387802124, 0.20890070497989655, -0.30289313197135925, 0.7846348285675049, 0.8134654760360718, -0.35724106431007385, 0.1139286682009697, -0.6768144369125366, 0.3785341680049896, 0.5161988139152527, -1.2021903991699219, -0.08802032470703125, 0.26713332533836365, 0.3076937198638916, 0.3415476977825165, 0.6573403477668762, -0.11866824328899384, 0.31093332171440125, 0.20176923274993896, 0.019763078540563583, -0.0016875702422112226, 0.08554363250732422, -0.2501627504825592, 0.06034327670931816, -0.25165992975234985, -0.4763832688331604 ]
open-llm-leaderboard/details_KoboldAI__OPT-6.7B-Erebus
open-llm-leaderboard
2023-10-22T01:42:22Z
200
0
[ "region:us" ]
null
2023-08-18T11:45:32Z
--- pretty_name: Evaluation run of KoboldAI/OPT-6.7B-Erebus dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [KoboldAI/OPT-6.7B-Erebus](https://huggingface.co/KoboldAI/OPT-6.7B-Erebus) on\ \ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KoboldAI__OPT-6.7B-Erebus\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T01:42:09.506783](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__OPT-6.7B-Erebus/blob/main/results_2023-10-22T01-42-09.506783.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001153523489932886,\n\ \ \"em_stderr\": 0.0003476179896857095,\n \"f1\": 0.04858431208053701,\n\ \ \"f1_stderr\": 0.0011921353872508366,\n \"acc\": 0.33522023153796765,\n\ \ \"acc_stderr\": 0.00806896604240255\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.001153523489932886,\n \"em_stderr\": 0.0003476179896857095,\n\ \ \"f1\": 0.04858431208053701,\n \"f1_stderr\": 0.0011921353872508366\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01061410159211524,\n \ \ \"acc_stderr\": 0.0028227133223877043\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.65982636148382,\n \"acc_stderr\": 0.013315218762417397\n\ \ }\n}\n```" repo_url: https://huggingface.co/KoboldAI/OPT-6.7B-Erebus leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|arc:challenge|25_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T17:20:54.049241.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T01_42_09.506783 path: - '**/details_harness|drop|3_2023-10-22T01-42-09.506783.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T01-42-09.506783.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T01_42_09.506783 path: - '**/details_harness|gsm8k|5_2023-10-22T01-42-09.506783.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T01-42-09.506783.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hellaswag|10_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:20:54.049241.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:20:54.049241.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T17:20:54.049241.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:20:54.049241.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T17_20_54.049241 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T17:20:54.049241.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:20:54.049241.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T17_20_54.049241 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T17:20:54.049241.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T17:20:54.049241.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T01_42_09.506783 path: - '**/details_harness|winogrande|5_2023-10-22T01-42-09.506783.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T01-42-09.506783.parquet' - config_name: results data_files: - split: 2023_07_19T17_20_54.049241 path: - results_2023-07-19T17:20:54.049241.parquet - split: 2023_10_22T01_42_09.506783 path: - results_2023-10-22T01-42-09.506783.parquet - split: latest path: - results_2023-10-22T01-42-09.506783.parquet --- # Dataset Card for Evaluation run of KoboldAI/OPT-6.7B-Erebus ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/KoboldAI/OPT-6.7B-Erebus - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [KoboldAI/OPT-6.7B-Erebus](https://huggingface.co/KoboldAI/OPT-6.7B-Erebus) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can, for instance, do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KoboldAI__OPT-6.7B-Erebus", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T01:42:09.506783](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__OPT-6.7B-Erebus/blob/main/results_2023-10-22T01-42-09.506783.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks;
you can find each of them in the "results" config and in the "latest" split of each eval): ```python { "all": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857095, "f1": 0.04858431208053701, "f1_stderr": 0.0011921353872508366, "acc": 0.33522023153796765, "acc_stderr": 0.00806896604240255 }, "harness|drop|3": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857095, "f1": 0.04858431208053701, "f1_stderr": 0.0011921353872508366 }, "harness|gsm8k|5": { "acc": 0.01061410159211524, "acc_stderr": 0.0028227133223877043 }, "harness|winogrande|5": { "acc": 0.65982636148382, "acc_stderr": 0.013315218762417397 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
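As a brief, illustrative addition (not part of the original card): the aggregated metrics shown under "Latest results" above live in the `results` configuration, so they can be read back with the same `datasets` API the card already uses. This is a minimal sketch; the exact column layout of the results parquet is not documented here, so the row is only printed rather than indexed by specific field names.

```python
# Minimal sketch: load the aggregated "results" configuration of this details
# dataset at its "latest" split and inspect what it contains.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_KoboldAI__OPT-6.7B-Erebus",
    "results",
    split="latest",
)

print(results)     # Dataset backed by results_2023-10-22T01-42-09.506783.parquet
print(results[0])  # first row; the schema is not documented above, so just inspect it
```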
[ -0.48133766651153564, -0.7300451397895813, 0.20620259642601013, 0.18494191765785217, -0.23473569750785828, -0.014784717932343483, -0.4482843577861786, -0.2427649199962616, 0.4551217555999756, 0.620697557926178, -0.6622975468635559, -0.8813230395317078, -0.5885154008865356, 0.19123055040836334, -0.057778824120759964, 1.141654133796692, -0.22637759149074554, -0.23422853648662567, 0.08062487095594406, -0.3438146710395813, -0.3824595808982849, -0.43090176582336426, -0.5585066080093384, -0.4917985796928406, 0.4920928478240967, 0.6527683138847351, 0.36184751987457275, 0.635540783405304, 0.6247130036354065, 0.38388678431510925, -0.1318719983100891, 0.162037655711174, -0.463675320148468, -0.08572100102901459, 0.27306485176086426, -0.7144560813903809, -0.7365632653236389, 0.06510528922080994, 0.689407467842102, 0.410439670085907, -0.12035586684942245, 0.5978934168815613, 0.06626985967159271, 0.5997575521469116, -0.44750139117240906, 0.39143723249435425, -0.3364658057689667, -0.05265706777572632, -0.38934269547462463, -0.1451811045408249, -0.1759696900844574, -0.3120206296443939, -0.1875964254140854, -0.5812391042709351, 0.21677425503730774, 0.18451277911663055, 1.116858720779419, 0.1671995222568512, -0.12712176144123077, -0.25419944524765015, -0.25283369421958923, 0.8342270255088806, -0.9631621241569519, -0.02208329737186432, 0.5905641913414001, 0.12523771822452545, -0.25323402881622314, -0.5164778232574463, -0.3059990406036377, -0.047326572239398956, -0.29981809854507446, 0.2470908910036087, 0.0704345852136612, -0.11266861855983734, 0.45685821771621704, 0.5740820169448853, -0.6472057700157166, 0.07636655122041702, -0.6337844729423523, -0.13585640490055084, 0.9769227504730225, 0.32048529386520386, 0.0665992945432663, -0.5563946962356567, -0.2690567970275879, -0.4152379035949707, -0.34204715490341187, 0.19204670190811157, 0.5831798315048218, 0.5031089782714844, -0.5741037726402283, 0.7846137881278992, -0.4143275320529938, 0.6147745251655579, -0.07033450901508331, -0.14794249832630157, 0.889240562915802, -0.5219526886940002, -0.2576766908168793, 0.13290603458881378, 1.023084282875061, 0.4040832817554474, 0.002790370723232627, 0.23001570999622345, -0.2524576187133789, -0.014556314796209335, 0.12709592282772064, -0.7294413447380066, -0.18322724103927612, 0.4693908989429474, -0.5989417433738708, -0.47967249155044556, 0.2513417899608612, -1.0402576923370361, -0.18978850543498993, -0.2660748362541199, 0.18205994367599487, -0.24000534415245056, -0.3956926167011261, -0.12557125091552734, -0.16121526062488556, 0.39181074500083923, 0.06441967934370041, -0.6554834246635437, 0.30721428990364075, 0.5921643376350403, 0.9476296305656433, -0.09581393003463745, -0.36241409182548523, -0.2427167147397995, -0.26781824231147766, -0.22087520360946655, 0.46900680661201477, -0.20756253600120544, -0.3819102346897125, -0.13357992470264435, 0.3888656795024872, -0.32039138674736023, -0.5795352458953857, 0.8229045271873474, -0.2743091881275177, 0.09755687415599823, -0.22192618250846863, -0.4816701412200928, -0.18437927961349487, 0.28617122769355774, -0.7654815912246704, 1.4002577066421509, 0.31914034485816956, -0.8917016983032227, 0.15841934084892273, -0.8375710248947144, -0.22629614174365997, 0.08424385637044907, 0.037625547498464584, -0.7161247134208679, -0.0659620463848114, 0.15692651271820068, 0.5491390824317932, -0.15899451076984406, 0.0930328443646431, -0.41950833797454834, -0.35252928733825684, 0.13739722967147827, -0.05170721560716629, 1.0508966445922852, 0.2657320201396942, -0.4547464847564697, 0.08048505336046219, 
-1.0100611448287964, 0.2306797206401825, 0.39635419845581055, -0.4677475690841675, -0.2758595943450928, -0.24190114438533783, 0.16428415477275848, 0.03753739595413208, 0.5359095335006714, -0.5669373869895935, 0.2996327579021454, -0.2219177931547165, 0.3593059182167053, 1.0010451078414917, -0.025147896260023117, 0.3344626724720001, -0.41169747710227966, 0.4121471345424652, 0.03337676450610161, 0.34528830647468567, 0.022130610421299934, -0.549523651599884, -0.910739541053772, -0.18755313754081726, 0.21272212266921997, 0.691228985786438, -0.49074387550354004, 0.7522514462471008, -0.42952609062194824, -0.7822439670562744, -0.723358154296875, 0.060301195830106735, 0.3410840332508087, 0.4517994225025177, 0.29018425941467285, -0.1352577656507492, -0.7273643612861633, -1.0724812746047974, -0.04989880323410034, -0.1537032127380371, 0.0848010703921318, 0.5442290902137756, 1.072986364364624, -0.2515435516834259, 0.6733540296554565, -0.6928189396858215, -0.2687149941921234, -0.2856585383415222, 0.07145869731903076, 0.8881605863571167, 0.5053700804710388, 0.46676790714263916, -0.7648964524269104, -0.3802492320537567, 0.07904412597417831, -0.8258616924285889, -0.280577152967453, -0.1473689079284668, -0.29354771971702576, 0.3949001431465149, -0.08618834614753723, -0.49867546558380127, 0.5100449323654175, 0.5115879774093628, -0.5706842541694641, 0.6132458448410034, -0.10736231505870819, 0.4633282721042633, -1.2204668521881104, 0.2194342017173767, 0.015097193419933319, -0.0049667502753436565, -0.4668796956539154, -0.12124169617891312, 0.08249156177043915, 0.26415592432022095, -0.39116254448890686, 0.6059766411781311, -0.41535505652427673, -0.22186264395713806, 0.0724048763513565, 0.1888485997915268, -0.18242548406124115, 0.589539647102356, -0.2555970251560211, 0.6805160045623779, 0.5102023482322693, -0.41423287987709045, 0.45971712470054626, 0.4055282771587372, -0.4985259175300598, 0.3338167667388916, -0.5696435570716858, 0.00681386748328805, 0.14522132277488708, 0.12466435879468918, -0.8243927955627441, -0.46011239290237427, 0.5473156571388245, -0.6277933120727539, 0.2447996586561203, -0.2697598934173584, -0.599960207939148, -0.48927435278892517, -0.44921568036079407, 0.18431538343429565, 0.5663337111473083, -0.47772687673568726, 0.38152074813842773, 0.3734285831451416, -0.0905693918466568, -0.5711572170257568, -0.6795706152915955, -0.19544954597949982, -0.40388622879981995, -0.6717066764831543, 0.31161266565322876, -0.16120636463165283, -0.24959038197994232, 0.02049735188484192, -0.013714710250496864, -0.03237532079219818, 0.09848344326019287, 0.3284429609775543, 0.630047619342804, -0.045763783156871796, -0.31967735290527344, -0.15944674611091614, -0.17812149226665497, 0.10000225901603699, 0.12794455885887146, 0.5415897965431213, -0.28725600242614746, -0.28938302397727966, -0.17171959578990936, 0.15148024260997772, 0.47924888134002686, -0.1202954649925232, 0.794805645942688, 0.6717425584793091, -0.27019035816192627, -0.05413295328617096, -0.44016337394714355, 0.04096757993102074, -0.4623721241950989, 0.40020811557769775, -0.30689138174057007, -0.7549293041229248, 0.8958768844604492, 0.18339580297470093, 0.310061514377594, 0.7301064133644104, 0.5527443885803223, 0.11837739497423172, 0.8316062092781067, 0.23522834479808807, -0.14673466980457306, 0.5740041136741638, -0.7556771636009216, -0.035320114344358444, -1.076296329498291, -0.4218321740627289, -0.512412965297699, -0.42178308963775635, -0.8317651748657227, -0.3068678081035614, 0.2291029691696167, 0.14875425398349762, -0.3804985284805298, 
0.5245288014411926, -0.7252799272537231, 0.21127735078334808, 0.6877184510231018, 0.33381563425064087, -0.05266556888818741, -0.007958154194056988, -0.015943704172968864, 0.2383641004562378, -0.5926586985588074, -0.4224059581756592, 1.3558807373046875, 0.271803617477417, 0.6434304714202881, -0.13918527960777283, 1.0422890186309814, 0.3073599636554718, 0.35998794436454773, -0.4877544641494751, 0.6169173717498779, 0.01293095201253891, -0.6133308410644531, -0.2530611753463745, -0.6410430669784546, -0.9045926928520203, 0.24682225286960602, -0.009096155874431133, -0.9283974170684814, 0.12918443977832794, 0.020832734182476997, -0.06036603823304176, 0.3591051399707794, -0.6819479465484619, 0.9036718010902405, -0.33354657888412476, -0.3895145356655121, 0.0844249352812767, -0.8406153321266174, 0.30874890089035034, 0.03726007044315338, 0.4168970584869385, -0.18425756692886353, -0.016339002177119255, 1.1850179433822632, -0.5569279193878174, 0.8160699605941772, -0.14632001519203186, 0.14259476959705353, 0.40114450454711914, -0.45440107583999634, 0.6519214510917664, -0.07458953559398651, -0.20651568472385406, 0.4866247773170471, -0.1696043759584427, -0.3843107521533966, -0.31957200169563293, 0.9184275269508362, -0.8890695571899414, -0.244483083486557, -0.46005257964134216, -0.5284227728843689, 0.32160866260528564, 0.34962978959083557, 0.43062740564346313, 0.3203166425228119, 0.06399675458669662, 0.2685620188713074, 0.42411208152770996, -0.17289941012859344, 0.4587939977645874, 0.3575946092605591, -0.15181715786457062, -0.8671291470527649, 0.7486987709999084, 0.2731107473373413, 0.06679069250822067, 0.10811345279216766, -0.01957281306385994, -0.5344504714012146, -0.33898913860321045, -0.4346493184566498, 0.3750503361225128, -0.6296417713165283, -0.3028850257396698, -0.38293781876564026, -0.22519607841968536, -0.4386226236820221, -0.12150026112794876, -0.4192398488521576, -0.3314594626426697, -0.40927401185035706, -0.22272656857967377, 0.5770246982574463, 0.5679504871368408, -0.2927083373069763, 0.24146096408367157, -0.763178288936615, 0.1859995722770691, -0.2501533031463623, 0.46850883960723877, -0.13672134280204773, -0.5033753514289856, -0.4394061267375946, 0.21069873869419098, -0.3624001443386078, -0.970550537109375, 0.6023023724555969, -0.04584150016307831, 0.7625839710235596, 0.15349434316158295, 0.21524643898010254, 0.7524688839912415, -0.23348508775234222, 0.9630261659622192, -0.007839756086468697, -0.6331285238265991, 0.7934688329696655, -0.284188449382782, 0.16048310697078705, 0.5025874972343445, 0.131667360663414, -0.38669946789741516, -0.3172215223312378, -1.0004031658172607, -1.1677616834640503, 1.1342461109161377, 0.6291218996047974, -0.3984413743019104, 0.1197434589266777, 0.32786867022514343, -0.09201788902282715, 0.14995014667510986, -0.6841029524803162, -0.8120630979537964, -0.14300383627414703, -0.36148685216903687, -0.20376189053058624, -0.07537402957677841, -0.4003813862800598, -0.3400103449821472, 0.9591521620750427, 0.024666402488946915, 0.5009535551071167, 0.2958017885684967, -0.08000766485929489, -0.05890238657593727, 0.3306792378425598, 0.4909805953502655, 0.6631699800491333, -0.356421560049057, -0.11870958656072617, 0.3687138855457306, -0.5840341448783875, -0.025982987135648727, 0.37161773443222046, -0.1335948407649994, 0.020070532336831093, 0.543022871017456, 1.0151551961898804, 0.16364607214927673, -0.38684940338134766, 0.3743104636669159, 0.12077457457780838, -0.3263654410839081, -0.5449466705322266, 0.058272022753953934, -0.08064761757850647, 0.40337666869163513, 
0.36052703857421875, -0.11185575276613235, 0.007224396336823702, -0.2502506375312805, 0.20616887509822845, 0.16369792819023132, -0.10639186203479767, -0.2560383379459381, 0.5808191895484924, 0.0010826432844623923, -0.38277724385261536, 0.7981686592102051, -0.028651658445596695, -0.6268160939216614, 1.1688650846481323, 0.3506905138492584, 0.9016192555427551, -0.08384991437196732, 0.10869601368904114, 0.6503221392631531, 0.3960081934928894, -0.1306203454732895, 0.5319783091545105, 0.09472775459289551, -0.5229619741439819, -0.15882562100887299, -0.7825973629951477, -0.1412346065044403, 0.3192555904388428, -1.0313849449157715, 0.4120359718799591, -0.010027616284787655, -0.3277336061000824, -0.12846848368644714, 0.3925018310546875, -0.8120686411857605, 0.08789830654859543, 0.05399462208151817, 0.9149593114852905, -1.0721986293792725, 0.4856030344963074, 0.9350255131721497, -0.4526503086090088, -0.8804592490196228, -0.45460835099220276, -0.08000506460666656, -0.7527480721473694, 0.3948819637298584, 0.36385414004325867, 0.5250835418701172, -0.1396757960319519, -0.724137544631958, -0.9321662187576294, 1.5076279640197754, 0.09820573776960373, -0.5546586513519287, 0.17312727868556976, 0.1517500877380371, 0.3745456039905548, -0.29600030183792114, 0.5747568011283875, 0.6778172850608826, 0.8099295496940613, -0.10693350434303284, -0.8620048761367798, 0.2435043901205063, -0.5178192853927612, -0.05173514038324356, 0.4364999532699585, -0.8832507133483887, 0.9441165924072266, -0.16278719902038574, -0.010651540011167526, 0.01670924946665764, 0.41261476278305054, 0.5908976197242737, 0.43120449781417847, 0.4984016418457031, 0.8081563711166382, 0.6223151683807373, -0.4462774991989136, 1.0226751565933228, -0.35141974687576294, 0.9547020792961121, 1.0283184051513672, 0.08919679373502731, 0.719886302947998, 0.3905869722366333, -0.42115476727485657, 0.440713107585907, 0.9479486346244812, -0.43559756875038147, 0.40026766061782837, 0.09052236378192902, 0.01893383264541626, -0.16914282739162445, 0.11719786375761032, -0.4688003659248352, 0.2637580335140228, 0.18500392138957977, -0.621491551399231, -0.1698152869939804, -0.2600901424884796, 0.12739810347557068, -0.3712999224662781, -0.2971899211406708, 0.6071256399154663, -0.006248327437788248, -0.6276865601539612, 0.6864070892333984, -0.2015574723482132, 0.6334173083305359, -0.7429335713386536, -0.20751725137233734, -0.14390011131763458, 0.3143322765827179, -0.5029621124267578, -0.9854879379272461, 0.13866184651851654, 0.010527623817324638, -0.20791512727737427, -0.12314356863498688, 0.7168915271759033, -0.30959904193878174, -0.6229995489120483, 0.3938373029232025, 0.26754769682884216, 0.3849395513534546, 0.18139192461967468, -0.9047293663024902, 0.28150302171707153, 0.27467983961105347, -0.7925897836685181, 0.34957045316696167, 0.28462761640548706, 0.12513086199760437, 0.5794626474380493, 0.7154086828231812, 0.12644925713539124, 0.20499099791049957, -0.11243883520364761, 1.0887763500213623, -0.7047857046127319, -0.4437272250652313, -0.7767442464828491, 0.8348397016525269, -0.41661956906318665, -0.605781078338623, 0.8591684699058533, 0.9643455147743225, 0.8825366497039795, 0.015720194205641747, 0.8490870594978333, -0.6087490916252136, 0.40872618556022644, -0.33678683638572693, 0.8686825633049011, -0.7135569453239441, 0.1793847233057022, -0.19759219884872437, -0.8794832229614258, 0.030663834884762764, 0.6983897089958191, -0.2604827284812927, 0.00815429538488388, 0.5316691994667053, 0.9426569938659668, 0.05865757539868355, 0.13658782839775085, 0.01500650029629469, 
0.4604378640651703, 0.23600715398788452, 0.6369469165802002, 0.5476988554000854, -0.728426456451416, 0.48779186606407166, -0.7010607123374939, -0.5083374977111816, -0.21166181564331055, -0.6471003890037537, -0.8023924231529236, -0.5322204232215881, -0.2584584951400757, -0.4743547737598419, 0.027305971831083298, 0.8771083950996399, 0.44470086693763733, -0.8437482118606567, -0.360866904258728, -0.03755820915102959, 0.19210399687290192, -0.22677068412303925, -0.36628076434135437, 0.5552466511726379, -0.16140373051166534, -0.8407953381538391, 0.14472605288028717, -0.1680576354265213, -0.19960235059261322, 0.01701566018164158, -0.26646485924720764, -0.31591174006462097, -0.2916981875896454, 0.401379257440567, 0.20699235796928406, -0.68289715051651, -0.29585713148117065, -0.06953305006027222, 0.05840393528342247, 0.20632588863372803, 0.3293165862560272, -0.525974690914154, 0.17520904541015625, 0.6585233211517334, 0.20271579921245575, 0.5470326542854309, 0.025189081206917763, 0.17980517446994781, -0.571674108505249, -0.03731554001569748, 0.008386080153286457, 0.5713040828704834, 0.14633798599243164, -0.4754968583583832, 0.9765302538871765, 0.33601897954940796, -0.848713219165802, -0.9413332939147949, -0.2183462381362915, -1.2071126699447632, 0.0438358448445797, 1.4180999994277954, -0.3264474868774414, -0.5000386238098145, 0.10399096459150314, -0.2525942027568817, 0.41982418298721313, -0.7018945217132568, 0.48803478479385376, 0.7559267282485962, -0.2970164716243744, 0.011799524538218975, -0.6622042059898376, 0.20596259832382202, 0.04571586102247238, -0.9272357225418091, -0.004019468557089567, 0.32018840312957764, 0.3968450725078583, 0.30510637164115906, 0.5901837944984436, -0.07139264047145844, -0.1073911115527153, 0.10509324818849564, 0.23536428809165955, -0.23587489128112793, -0.07622067630290985, -0.19829948246479034, 0.015553239732980728, -0.4821866452693939, -0.49088263511657715 ]
open-llm-leaderboard/details_KoboldAI__OPT-13B-Nerys-v2
open-llm-leaderboard
2023-10-22T00:09:49Z
200
0
[ "region:us" ]
null
2023-08-18T11:46:35Z
--- pretty_name: Evaluation run of KoboldAI/OPT-13B-Nerys-v2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [KoboldAI/OPT-13B-Nerys-v2](https://huggingface.co/KoboldAI/OPT-13B-Nerys-v2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KoboldAI__OPT-13B-Nerys-v2\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-22T00:09:36.739162](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__OPT-13B-Nerys-v2/blob/main/results_2023-10-22T00-09-36.739162.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0012583892617449664,\n\ \ \"em_stderr\": 0.0003630560893118994,\n \"f1\": 0.052374161073825636,\n\ \ \"f1_stderr\": 0.001268760566280153,\n \"acc\": 0.3405215977041276,\n\ \ \"acc_stderr\": 0.007217878569712872\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.0003630560893118994,\n\ \ \"f1\": 0.052374161073825636,\n \"f1_stderr\": 0.001268760566280153\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \ \ \"acc_stderr\": 0.0013121578148674266\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6787687450670876,\n \"acc_stderr\": 0.013123599324558317\n\ \ }\n}\n```" repo_url: https://huggingface.co/KoboldAI/OPT-13B-Nerys-v2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|arc:challenge|25_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T18:46:37.808962.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_22T00_09_36.739162 path: - '**/details_harness|drop|3_2023-10-22T00-09-36.739162.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-22T00-09-36.739162.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_22T00_09_36.739162 path: - '**/details_harness|gsm8k|5_2023-10-22T00-09-36.739162.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-22T00-09-36.739162.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hellaswag|10_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:46:37.808962.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:46:37.808962.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:46:37.808962.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:46:37.808962.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T18_46_37.808962 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:46:37.808962.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:46:37.808962.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T18_46_37.808962 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:46:37.808962.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T18:46:37.808962.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_22T00_09_36.739162 path: - '**/details_harness|winogrande|5_2023-10-22T00-09-36.739162.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-22T00-09-36.739162.parquet' - config_name: results data_files: - split: 2023_07_19T18_46_37.808962 path: - results_2023-07-19T18:46:37.808962.parquet - split: 2023_10_22T00_09_36.739162 path: - results_2023-10-22T00-09-36.739162.parquet - split: latest path: - results_2023-10-22T00-09-36.739162.parquet --- # Dataset Card for Evaluation run of KoboldAI/OPT-13B-Nerys-v2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/KoboldAI/OPT-13B-Nerys-v2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [KoboldAI/OPT-13B-Nerys-v2](https://huggingface.co/KoboldAI/OPT-13B-Nerys-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KoboldAI__OPT-13B-Nerys-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T00:09:36.739162](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__OPT-13B-Nerys-v2/blob/main/results_2023-10-22T00-09-36.739162.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0012583892617449664, "em_stderr": 0.0003630560893118994, "f1": 0.052374161073825636, "f1_stderr": 0.001268760566280153, "acc": 0.3405215977041276, "acc_stderr": 0.007217878569712872 }, "harness|drop|3": { "em": 0.0012583892617449664, "em_stderr": 0.0003630560893118994, "f1": 0.052374161073825636, "f1_stderr": 0.001268760566280153 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.0013121578148674266 }, "harness|winogrande|5": { "acc": 0.6787687450670876, "acc_stderr": 0.013123599324558317 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
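As a small companion to the loading snippet in the card above, the sketch below shows one way the timestamped and "latest" splits described there could be addressed explicitly. It is only an illustrative sketch: the repository, config, and split names are copied from the YAML in this record, and the call pattern assumes the usual `datasets.load_dataset(repo_id, config_name, split=...)` signature rather than anything specific to this card.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_KoboldAI__OPT-13B-Nerys-v2"

# The aggregated "results" config keeps one split per run timestamp,
# plus a "latest" split that tracks the most recent run (see the YAML above).
results_latest = load_dataset(REPO, "results", split="latest")

# A specific run can be pinned by its timestamped split name instead.
results_run = load_dataset(REPO, "results", split="2023_10_22T00_09_36.739162")

print(results_latest)
print(results_run)
```

Pinning the timestamped split rather than "latest" keeps downstream analyses reproducible even if the evaluation is re-run later.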
[ -0.44557544589042664, -0.6830132603645325, 0.1997780203819275, 0.17139610648155212, -0.19753478467464447, 0.016945067793130875, -0.3832869827747345, -0.2401285022497177, 0.42742231488227844, 0.6268824934959412, -0.7643768787384033, -0.859330952167511, -0.652812123298645, 0.24285301566123962, -0.09550809860229492, 1.0674327611923218, -0.21530669927597046, -0.202640563249588, 0.05696410313248634, -0.2759280204772949, -0.3724346458911896, -0.43323856592178345, -0.5558238625526428, -0.4644705653190613, 0.43858373165130615, 0.7072395086288452, 0.36272236704826355, 0.6159231662750244, 0.6914623379707336, 0.3744853436946869, -0.13308776915073395, 0.22922512888908386, -0.4678661823272705, -0.07812602072954178, 0.2762298882007599, -0.6671340465545654, -0.7307549715042114, 0.11063442379236221, 0.6909516453742981, 0.406358540058136, -0.07708130776882172, 0.5846503973007202, 0.12591074407100677, 0.5831539034843445, -0.46671587228775024, 0.382991224527359, -0.3803442120552063, -0.07181276381015778, -0.36578789353370667, -0.18492594361305237, -0.14631062746047974, -0.3851145803928375, -0.1619529277086258, -0.6098828911781311, 0.17224416136741638, 0.12204165011644363, 1.072867751121521, 0.19791920483112335, -0.2062682807445526, -0.317160964012146, -0.2232825607061386, 0.8984304070472717, -1.0196727514266968, -0.050354551523923874, 0.6394388675689697, 0.14747631549835205, -0.25044533610343933, -0.5387425422668457, -0.2929058074951172, -0.1361016035079956, -0.293296217918396, 0.23180750012397766, 0.0005811087903566658, -0.15913744270801544, 0.410317599773407, 0.5799342393875122, -0.669761598110199, 0.11371178179979324, -0.6243540644645691, -0.18900258839130402, 0.999252200126648, 0.3785676956176758, 0.08584871143102646, -0.5338137149810791, -0.2521263062953949, -0.38638514280319214, -0.3961775302886963, 0.11904052644968033, 0.6313806772232056, 0.534051239490509, -0.6143481731414795, 0.8495110273361206, -0.45645245909690857, 0.6166513562202454, 0.039350107312202454, -0.20132800936698914, 0.9148303866386414, -0.5711833238601685, -0.22821027040481567, 0.1305365413427353, 1.0193198919296265, 0.4608333706855774, 0.019642654806375504, 0.21147392690181732, -0.19484524428844452, 0.01833685301244259, 0.06526292860507965, -0.8293724656105042, -0.1779012531042099, 0.3753419518470764, -0.6299440860748291, -0.515143096446991, 0.204703226685524, -1.0374335050582886, -0.23489779233932495, -0.34693458676338196, 0.2253950536251068, -0.1997544765472412, -0.3836313784122467, -0.11073899269104004, -0.10041159391403198, 0.36775287985801697, 0.077006496489048, -0.6665209531784058, 0.3524405360221863, 0.6074017286300659, 0.9507624506950378, -0.09899293631315231, -0.30920812487602234, -0.31741344928741455, -0.335860937833786, -0.24605071544647217, 0.446855366230011, -0.2229849249124527, -0.35923197865486145, -0.16064809262752533, 0.4513855278491974, -0.34595468640327454, -0.6434449553489685, 0.7405539751052856, -0.23197366297245026, 0.15719905495643616, -0.24508219957351685, -0.48400673270225525, -0.13586586713790894, 0.3499535620212555, -0.7300235629081726, 1.4128485918045044, 0.3356010615825653, -0.8891428112983704, 0.14239749312400818, -0.7578199505805969, -0.20963042974472046, 0.08523828536272049, 0.06065531447529793, -0.6406001448631287, -0.051345616579055786, 0.13478226959705353, 0.5352907776832581, -0.24279922246932983, 0.09966320544481277, -0.3651310205459595, -0.34469547867774963, 0.09506740421056747, -0.11082495003938675, 1.0209226608276367, 0.21334488689899445, -0.47114646434783936, 0.06997191905975342, 
-1.0445127487182617, 0.1948052942752838, 0.3253606855869293, -0.4738047420978546, -0.19897665083408356, -0.29831498861312866, 0.2100953310728073, 0.15604405105113983, 0.5994498133659363, -0.6003548502922058, 0.29669278860092163, -0.2762214243412018, 0.4283091127872467, 0.9755228161811829, 0.011891587637364864, 0.3733965754508972, -0.4077218770980835, 0.49460893869400024, -0.04297417774796486, 0.3794725239276886, 0.09754012525081635, -0.5587576627731323, -0.8336336016654968, -0.1032889261841774, 0.1470751315355301, 0.6996272802352905, -0.5435358285903931, 0.7033387422561646, -0.3350916802883148, -0.8179388642311096, -0.6759870648384094, 0.07387413829565048, 0.3951002359390259, 0.499293714761734, 0.29984745383262634, -0.1752900779247284, -0.7674388885498047, -1.0538735389709473, -0.012984584085643291, -0.1700664907693863, 0.1591569483280182, 0.5779082179069519, 1.0481946468353271, -0.2879488170146942, 0.6406430006027222, -0.6365953087806702, -0.28897824883461, -0.2811416983604431, 0.06233755126595497, 0.8999578356742859, 0.5341941714286804, 0.4529916048049927, -0.7351583242416382, -0.3890373110771179, 0.08425375074148178, -0.8097477555274963, -0.2983914613723755, -0.14797060191631317, -0.31165561079978943, 0.3912530839443207, 0.029147973284125328, -0.4444185793399811, 0.5247456431388855, 0.5840119123458862, -0.5859725475311279, 0.6686505675315857, -0.08706765621900558, 0.37986037135124207, -1.2339508533477783, 0.1604069322347641, 0.014122680760920048, 0.06325192749500275, -0.46211522817611694, -0.16039060056209564, 0.05711933970451355, 0.316874623298645, -0.40527257323265076, 0.6632921695709229, -0.40472111105918884, -0.1899387538433075, 0.0007851276895962656, 0.10312264412641525, -0.16826246678829193, 0.5159576535224915, -0.23819229006767273, 0.7274546027183533, 0.5122960805892944, -0.37788882851600647, 0.39337581396102905, 0.42921698093414307, -0.5223920941352844, 0.3412638306617737, -0.5717741847038269, -0.01201794296503067, 0.14932477474212646, 0.08147236704826355, -0.865624189376831, -0.48866763710975647, 0.5355751514434814, -0.6283212304115295, 0.19721516966819763, -0.309133380651474, -0.600616455078125, -0.4578174650669098, -0.3926437795162201, 0.21675583720207214, 0.5451327562332153, -0.47004109621047974, 0.35697638988494873, 0.3502589166164398, -0.05813386291265488, -0.5642021298408508, -0.7748351097106934, -0.14582739770412445, -0.38778579235076904, -0.6552139520645142, 0.36460956931114197, -0.17282511293888092, -0.2682133913040161, 0.040698014199733734, -0.06304046511650085, -0.11219324171543121, 0.1173342913389206, 0.37527161836624146, 0.5981624722480774, -0.08126624673604965, -0.3396141529083252, -0.1890433430671692, -0.2531231641769409, 0.10839961469173431, 0.08580051362514496, 0.598085343837738, -0.2507595419883728, -0.2831052243709564, -0.2820245921611786, 0.13095791637897491, 0.5189061760902405, -0.1357659101486206, 0.8460469841957092, 0.672487735748291, -0.26698037981987, -0.00007521289080614224, -0.4496166706085205, -0.003923782613128424, -0.4803175628185272, 0.38179895281791687, -0.22717909514904022, -0.8472477197647095, 0.8312773704528809, 0.1905844658613205, 0.21322359144687653, 0.7724987864494324, 0.5764409899711609, 0.13930384814739227, 0.8174630999565125, 0.2648119628429413, -0.13087941706180573, 0.5631526112556458, -0.7785576581954956, 0.029602685943245888, -1.0611966848373413, -0.514629065990448, -0.5017415285110474, -0.41536664962768555, -0.8381615877151489, -0.32413914799690247, 0.22687186300754547, 0.16443513333797455, -0.4020738899707794, 0.567257285118103, 
-0.7297030687332153, 0.21995805203914642, 0.7067069411277771, 0.23923563957214355, -0.061655789613723755, -0.03965340927243233, -0.006681931205093861, 0.18608377873897552, -0.5672041773796082, -0.3957465589046478, 1.3631318807601929, 0.17767813801765442, 0.6411938667297363, -0.11278188228607178, 1.059775471687317, 0.25162023305892944, 0.28723275661468506, -0.5109904408454895, 0.6085044145584106, -0.056601155549287796, -0.5564348101615906, -0.196806862950325, -0.6336163878440857, -0.9069406986236572, 0.1787838637828827, -0.02117546647787094, -0.9095427393913269, 0.2339537888765335, 0.019290737807750702, -0.06822296231985092, 0.3215711712837219, -0.6429232954978943, 0.9051899313926697, -0.2823430597782135, -0.40240320563316345, 0.10274630039930344, -0.8091057538986206, 0.3446901738643646, 0.022840166464447975, 0.37984031438827515, -0.21835866570472717, 0.03715495020151138, 1.1277439594268799, -0.616786003112793, 0.799354612827301, -0.15181508660316467, 0.12095574289560318, 0.4156893193721771, -0.40104591846466064, 0.6465678811073303, -0.07221993803977966, -0.2577241361141205, 0.39627110958099365, -0.16905367374420166, -0.301914244890213, -0.29895660281181335, 0.9164815545082092, -0.8500896096229553, -0.17492327094078064, -0.4458616077899933, -0.567783772945404, 0.3362201154232025, 0.3121902644634247, 0.4711850583553314, 0.41757190227508545, 0.11572328954935074, 0.30795931816101074, 0.3391086459159851, -0.1797972470521927, 0.5058132410049438, 0.3673299252986908, -0.1132984310388565, -0.8322477340698242, 0.7637749910354614, 0.33406224846839905, 0.10816983133554459, 0.14433397352695465, -0.00471584452316165, -0.5541157126426697, -0.3952557444572449, -0.35327544808387756, 0.3243601322174072, -0.5733323097229004, -0.3307472765445709, -0.41532573103904724, -0.2615421414375305, -0.432904988527298, -0.11702033877372742, -0.40800556540489197, -0.38386422395706177, -0.453446626663208, -0.2418881505727768, 0.6272956132888794, 0.6194266080856323, -0.36620423197746277, 0.292315274477005, -0.7026711106300354, 0.18129973113536835, -0.1960880160331726, 0.39139002561569214, -0.1176903247833252, -0.5201005935668945, -0.39500534534454346, 0.15911930799484253, -0.3305291533470154, -0.95123690366745, 0.6499680876731873, -0.052473101764917374, 0.7215387225151062, 0.13113945722579956, 0.1546778827905655, 0.7451795339584351, -0.2016662210226059, 0.9508798718452454, -0.016079233959317207, -0.6649008989334106, 0.7647697925567627, -0.271440714597702, 0.1805761307477951, 0.5710496306419373, 0.10085909068584442, -0.45085611939430237, -0.24677567183971405, -0.9491320252418518, -1.1652476787567139, 1.1167519092559814, 0.6304699182510376, -0.3297489583492279, 0.06989065557718277, 0.3406074345111847, -0.09860552102327347, 0.14842261373996735, -0.6661251187324524, -0.769283652305603, -0.09717148542404175, -0.35506877303123474, -0.2089313566684723, -0.13653816282749176, -0.3005659580230713, -0.36307603120803833, 1.0013824701309204, -0.02489374205470085, 0.45258617401123047, 0.2546825408935547, -0.0674804076552391, 0.0055897412821650505, 0.25680646300315857, 0.44362500309944153, 0.6572291851043701, -0.36575281620025635, -0.11084239184856415, 0.334077924489975, -0.5849171280860901, 0.008929003961384296, 0.3760001063346863, -0.1338319629430771, -0.004599937703460455, 0.6044414639472961, 0.9661427736282349, 0.17501826584339142, -0.3931468725204468, 0.4134061336517334, 0.11626724898815155, -0.42311376333236694, -0.4984111785888672, 0.02701176330447197, -0.07345141470432281, 0.4219198226928711, 0.325470894575119, 
-0.05350517854094505, 0.046696387231349945, -0.258933961391449, 0.25216153264045715, 0.18513762950897217, -0.18051941692829132, -0.23806124925613403, 0.5692152380943298, -0.13126997649669647, -0.3684256672859192, 0.7864446043968201, -0.12804539501667023, -0.5698114633560181, 1.2007708549499512, 0.3590693771839142, 0.902515709400177, -0.10815392434597015, 0.18517117202281952, 0.6826623678207397, 0.43219465017318726, -0.13068200647830963, 0.616598904132843, 0.14634686708450317, -0.5199354887008667, -0.26406651735305786, -0.7600816488265991, -0.14653977751731873, 0.3079510033130646, -1.07015061378479, 0.3756518065929413, -0.060415029525756836, -0.27971845865249634, -0.14734134078025818, 0.4153915345668793, -0.8065937161445618, 0.12908314168453217, 0.06296776980161667, 0.9248521327972412, -1.1059194803237915, 0.49487176537513733, 0.8445136547088623, -0.4121776521205902, -0.927588701248169, -0.42850276827812195, -0.011838122271001339, -0.7964744567871094, 0.43339380621910095, 0.370181143283844, 0.481796532869339, -0.16939319670200348, -0.7204458713531494, -1.0749400854110718, 1.5102465152740479, 0.14172221720218658, -0.5917104482650757, 0.1871669590473175, 0.1698572337627411, 0.36215487122535706, -0.28890326619148254, 0.5462585091590881, 0.7034212946891785, 0.7796178460121155, -0.04243454709649086, -0.8636760711669922, 0.22565171122550964, -0.5630088448524475, -0.12639117240905762, 0.3798457980155945, -0.8543258309364319, 1.0347918272018433, -0.09044814109802246, 0.04984888806939125, 0.09437407553195953, 0.3410423696041107, 0.5990110039710999, 0.4459018409252167, 0.4350956380367279, 0.7830488681793213, 0.6675164103507996, -0.41711127758026123, 1.0233900547027588, -0.3533206582069397, 0.8983320593833923, 1.1179530620574951, 0.0628877505660057, 0.6848246455192566, 0.3780701756477356, -0.36083701252937317, 0.46603697538375854, 0.9151694178581238, -0.4292004108428955, 0.49187469482421875, 0.12238237261772156, 0.012916442938148975, -0.16856008768081665, -0.001929907244630158, -0.4963182508945465, 0.3284400999546051, 0.16078251600265503, -0.6309604048728943, -0.23237912356853485, -0.2676551043987274, 0.15364398062229156, -0.3160894811153412, -0.2810691297054291, 0.6050021648406982, -0.006767196115106344, -0.5692799091339111, 0.6428927183151245, -0.15406450629234314, 0.6432110667228699, -0.6805838346481323, -0.2030700296163559, -0.1667584776878357, 0.30844801664352417, -0.507628321647644, -0.9336295127868652, 0.19100216031074524, 0.0584387481212616, -0.22606194019317627, -0.161971777677536, 0.707604169845581, -0.30256226658821106, -0.7095600366592407, 0.40872830152511597, 0.3090709149837494, 0.35947930812835693, 0.12399351596832275, -0.8920003771781921, 0.23978669941425323, 0.2595005929470062, -0.6882150769233704, 0.35279202461242676, 0.26052823662757874, 0.0736267939209938, 0.5299660563468933, 0.7511467337608337, 0.04517986997961998, 0.21849054098129272, -0.0697970762848854, 1.091033935546875, -0.6184168457984924, -0.4009084403514862, -0.8635541200637817, 0.8186265230178833, -0.35046476125717163, -0.6813746690750122, 0.9241971969604492, 1.003865361213684, 0.8250279426574707, 0.08922544121742249, 0.80890291929245, -0.5433135032653809, 0.38194456696510315, -0.3768082857131958, 0.8025826811790466, -0.7126309275627136, 0.12674598395824432, -0.2267727553844452, -0.898012101650238, -0.02391197718679905, 0.7057539224624634, -0.31915929913520813, -0.037889815866947174, 0.5019137263298035, 1.0064785480499268, 0.0736205205321312, 0.1639549881219864, -0.061793193221092224, 0.47301456332206726, 
0.21265047788619995, 0.7216722369194031, 0.6548158526420593, -0.7083421349525452, 0.44712507724761963, -0.7198540568351746, -0.4374054968357086, -0.18567778170108795, -0.6971603631973267, -0.8054304718971252, -0.5136845707893372, -0.2999737858772278, -0.5016283988952637, -0.0020302743650972843, 0.8743087649345398, 0.530462920665741, -0.867917001247406, -0.35229170322418213, -0.13280464708805084, 0.1277347207069397, -0.20614087581634521, -0.3591466248035431, 0.573748767375946, -0.11656425893306732, -0.7880244255065918, 0.268524706363678, -0.14403851330280304, -0.17601396143436432, 0.052966903895139694, -0.2894728183746338, -0.2980208992958069, -0.2938842177391052, 0.37188026309013367, 0.1296648532152176, -0.6759276390075684, -0.22208279371261597, -0.023024948313832283, 0.06725258380174637, 0.19677101075649261, 0.27738794684410095, -0.5669661164283752, 0.12691780924797058, 0.622186005115509, 0.2211143523454666, 0.5996516942977905, -0.010965314693748951, 0.17737272381782532, -0.5625750422477722, -0.018155692145228386, 0.035613764077425, 0.5125166177749634, 0.15914803743362427, -0.4930282235145569, 0.9696372151374817, 0.31756463646888733, -0.7969067692756653, -0.9630820155143738, -0.22508135437965393, -1.1781219244003296, 0.06692913174629211, 1.3993185758590698, -0.24831518530845642, -0.4363326132297516, 0.13455578684806824, -0.2431677132844925, 0.3572031557559967, -0.6298356056213379, 0.40620967745780945, 0.7341888546943665, -0.27258211374282837, 0.00037340857670642436, -0.69537752866745, 0.2628009617328644, 0.07628551870584488, -0.9696100950241089, -0.05828752741217613, 0.33316633105278015, 0.384909987449646, 0.25023171305656433, 0.6009130477905273, -0.09757992625236511, -0.10663846135139465, 0.07214998453855515, 0.1554379165172577, -0.22911326587200165, -0.13044649362564087, -0.24736106395721436, 0.060020141303539276, -0.5217385292053223, -0.4861299395561218 ]
open-llm-leaderboard/details_KoboldAI__fairseq-dense-355M
open-llm-leaderboard
2023-10-21T17:11:46Z
200
0
[ "region:us" ]
null
2023-08-18T11:47:12Z
--- pretty_name: Evaluation run of KoboldAI/fairseq-dense-355M dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [KoboldAI/fairseq-dense-355M](https://huggingface.co/KoboldAI/fairseq-dense-355M)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KoboldAI__fairseq-dense-355M\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T17:11:35.207180](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__fairseq-dense-355M/blob/main/results_2023-10-21T17-11-35.207180.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.024643456375838927,\n\ \ \"em_stderr\": 0.0015877135903759347,\n \"f1\": 0.06476405201342264,\n\ \ \"f1_stderr\": 0.0019741533260558613,\n \"acc\": 0.2644041041831097,\n\ \ \"acc_stderr\": 0.0070145708079548084\n },\n \"harness|drop|3\":\ \ {\n \"em\": 0.024643456375838927,\n \"em_stderr\": 0.0015877135903759347,\n\ \ \"f1\": 0.06476405201342264,\n \"f1_stderr\": 0.0019741533260558613\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\"\ : 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5288082083662194,\n\ \ \"acc_stderr\": 0.014029141615909617\n }\n}\n```" repo_url: https://huggingface.co/KoboldAI/fairseq-dense-355M leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|arc:challenge|25_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T14:19:36.418877.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T17_11_35.207180 path: - '**/details_harness|drop|3_2023-10-21T17-11-35.207180.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T17-11-35.207180.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T17_11_35.207180 path: - '**/details_harness|gsm8k|5_2023-10-21T17-11-35.207180.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T17-11-35.207180.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hellaswag|10_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:19:36.418877.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:19:36.418877.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T14:19:36.418877.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:19:36.418877.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T14_19_36.418877 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T14:19:36.418877.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:19:36.418877.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T14_19_36.418877 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T14:19:36.418877.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T14:19:36.418877.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T17_11_35.207180 path: - '**/details_harness|winogrande|5_2023-10-21T17-11-35.207180.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T17-11-35.207180.parquet' - config_name: results data_files: - split: 2023_07_19T14_19_36.418877 path: - results_2023-07-19T14:19:36.418877.parquet - split: 2023_10_21T17_11_35.207180 path: - results_2023-10-21T17-11-35.207180.parquet - split: latest path: - results_2023-10-21T17-11-35.207180.parquet --- # Dataset Card for Evaluation run of KoboldAI/fairseq-dense-355M ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/KoboldAI/fairseq-dense-355M - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [KoboldAI/fairseq-dense-355M](https://huggingface.co/KoboldAI/fairseq-dense-355M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KoboldAI__fairseq-dense-355M", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T17:11:35.207180](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__fairseq-dense-355M/blob/main/results_2023-10-21T17-11-35.207180.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.024643456375838927, "em_stderr": 0.0015877135903759347, "f1": 0.06476405201342264, "f1_stderr": 0.0019741533260558613, "acc": 0.2644041041831097, "acc_stderr": 0.0070145708079548084 }, "harness|drop|3": { "em": 0.024643456375838927, "em_stderr": 0.0015877135903759347, "f1": 0.06476405201342264, "f1_stderr": 0.0019741533260558613 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.5288082083662194, "acc_stderr": 0.014029141615909617 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
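To complement the example in the card above, here is a hypothetical sketch of how the per-task configs listed in this record could be enumerated and then loaded for a pinned run. It assumes `get_dataset_config_names` is exposed by the `datasets` library (as in recent versions); the repository, config, and split names are taken verbatim from the YAML above, and nothing here is prescribed by the card itself.

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_KoboldAI__fairseq-dense-355M"

# One config per evaluated task, plus the aggregated "results" config.
configs = get_dataset_config_names(REPO)
print(f"{len(configs)} configs, e.g. {configs[:5]}")

# Pin the winogrande details to the run timestamp listed in the YAML above,
# rather than relying on the moving "latest" split.
winogrande_run = load_dataset(
    REPO,
    "harness_winogrande_5",
    split="2023_10_21T17_11_35.207180",
)
print(winogrande_run)
```

The same pattern applies to any of the `harness_hendrycksTest_*` configs enumerated earlier in this record.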
[ -0.41585078835487366, -0.6539362668991089, 0.2748866081237793, 0.2338634580373764, -0.07423318922519684, -0.05610957741737366, -0.4112433195114136, -0.18131813406944275, 0.29906943440437317, 0.5914707779884338, -0.7017492055892944, -0.8715994358062744, -0.6486443877220154, 0.17287099361419678, -0.1367512196302414, 1.1079851388931274, -0.21364885568618774, -0.16201411187648773, 0.05542455241084099, -0.4133605659008026, -0.3949374258518219, -0.42352843284606934, -0.5693783760070801, -0.4052399694919586, 0.46971502900123596, 0.7183660268783569, 0.43470674753189087, 0.6059425473213196, 0.4975781738758087, 0.38983312249183655, -0.0814201831817627, 0.14379270374774933, -0.5382629036903381, -0.06615214049816132, 0.22943435609340668, -0.6864972710609436, -0.629806399345398, 0.05980845168232918, 0.7771826386451721, 0.46038737893104553, -0.21284598112106323, 0.6582988500595093, 0.11634411662817001, 0.6553654670715332, -0.5357733368873596, 0.36997684836387634, -0.4123295843601227, -0.01004754938185215, -0.41128507256507874, -0.08542456477880478, -0.07582666724920273, -0.34037309885025024, -0.2934756278991699, -0.5759094953536987, 0.15384945273399353, 0.13106638193130493, 1.0945711135864258, 0.13175007700920105, -0.26469308137893677, -0.15356148779392242, -0.30702662467956543, 0.922222375869751, -0.8946179747581482, 0.0916747897863388, 0.5898213982582092, 0.18626244366168976, -0.26796242594718933, -0.6429120898246765, -0.4152446687221527, 0.008634100668132305, -0.21494163572788239, 0.1835218071937561, 0.008558751083910465, -0.13365308940410614, 0.5439143776893616, 0.5353465676307678, -0.6730726957321167, 0.04710916802287102, -0.6127371788024902, -0.22965237498283386, 1.0908777713775635, 0.4475323557853699, 0.05812123790383339, -0.4974672198295593, -0.29450511932373047, -0.3553869426250458, -0.30768582224845886, 0.28089720010757446, 0.4543713629245758, 0.45898932218551636, -0.497437447309494, 0.8523164391517639, -0.6003941297531128, 0.5116965174674988, -0.0169282965362072, -0.2833767533302307, 0.8928956389427185, -0.5607672333717346, -0.2046610563993454, 0.07674258202314377, 1.1017214059829712, 0.4769977629184723, 0.024547480046749115, 0.16941341757774353, -0.36148256063461304, -0.08979855477809906, -0.03505672514438629, -0.8671723008155823, -0.05506980046629906, 0.4688928425312042, -0.6095108985900879, -0.4018976092338562, 0.2433594912290573, -0.8927246332168579, -0.20357918739318848, -0.3213360905647278, 0.27483537793159485, -0.13739633560180664, -0.30762800574302673, -0.05378670245409012, -0.13513654470443726, 0.36310237646102905, 0.11401604861021042, -0.6006728410720825, 0.34867867827415466, 0.6066139340400696, 0.9373781681060791, -0.14892828464508057, -0.36839088797569275, -0.47765612602233887, -0.2644363045692444, -0.13843503594398499, 0.4212460517883301, -0.13375188410282135, -0.39181894063949585, -0.18477889895439148, 0.5102728009223938, -0.3141629099845886, -0.5605102777481079, 0.8650381565093994, -0.3155006170272827, 0.1750955730676651, -0.28021177649497986, -0.4760517179965973, -0.13155071437358856, 0.37050628662109375, -0.7251427173614502, 1.4701963663101196, 0.3323429226875305, -0.9803528189659119, 0.10878989100456238, -0.7543261051177979, -0.18147608637809753, 0.028346914798021317, -0.025517677888274193, -0.6351333260536194, -0.14122174680233002, 0.12738122045993805, 0.6356269121170044, -0.26220741868019104, 0.20569320023059845, -0.4013356864452362, -0.46010804176330566, 0.21142809092998505, -0.14276525378227234, 1.1192374229431152, 0.33514782786369324, -0.42179229855537415, 
0.1396275758743286, -1.0267137289047241, 0.18995976448059082, 0.38114669919013977, -0.4436902403831482, -0.1337757259607315, -0.3015018403530121, 0.27544480562210083, 0.19769960641860962, 0.5077857971191406, -0.5313078165054321, 0.2767215371131897, -0.22842274606227875, 0.2907041609287262, 0.9365108013153076, 0.03324082866311073, 0.3675418794155121, -0.5453861951828003, 0.508204460144043, -0.013956165872514248, 0.36951354146003723, 0.16482661664485931, -0.5534185171127319, -0.7571080923080444, -0.22469033300876617, 0.20911969244480133, 0.7681076526641846, -0.48262983560562134, 0.6599332690238953, -0.4339355528354645, -0.7752633690834045, -0.6552191972732544, 0.05036518722772598, 0.2767471671104431, 0.5304070711135864, 0.2514951825141907, -0.1748158484697342, -0.665464460849762, -1.0370196104049683, -0.1064605712890625, -0.059789612889289856, 0.04157881811261177, 0.5328881740570068, 0.9413015842437744, -0.4226745069026947, 0.6613098978996277, -0.7049911618232727, -0.25810524821281433, -0.17856501042842865, 0.008210184052586555, 0.8563281893730164, 0.5251657962799072, 0.4305529296398163, -0.711614727973938, -0.41500627994537354, -0.00933042075484991, -0.742918848991394, -0.2780386507511139, -0.11548905819654465, -0.3127070665359497, 0.3438582122325897, 0.04711226001381874, -0.539753258228302, 0.4986492097377777, 0.5024752020835876, -0.6083923578262329, 0.6978249549865723, 0.012287436053156853, 0.5356113314628601, -1.197364330291748, 0.1764974594116211, -0.03229564055800438, -0.05736146867275238, -0.4045918881893158, -0.1837814301252365, 0.0008241211180575192, 0.2221437245607376, -0.4518790543079376, 0.7531301379203796, -0.34964999556541443, -0.21007320284843445, 0.06881009787321091, 0.15577824413776398, -0.027431229129433632, 0.5675897002220154, -0.33452582359313965, 0.70393306016922, 0.5343800783157349, -0.3662894070148468, 0.38512441515922546, 0.38674065470695496, -0.5406627058982849, 0.23411127924919128, -0.5950512290000916, -0.10342811048030853, 0.1582929939031601, 0.12758272886276245, -0.8449496030807495, -0.44171789288520813, 0.5295921564102173, -0.5378879904747009, 0.15670838952064514, -0.32458049058914185, -0.6914636492729187, -0.3744577467441559, -0.4575633704662323, 0.25525984168052673, 0.5422800779342651, -0.40055933594703674, 0.33747774362564087, 0.3823910355567932, -0.06681816279888153, -0.6420972943305969, -0.7157775163650513, -0.2694791257381439, -0.4402017593383789, -0.7567489743232727, 0.29236385226249695, -0.20120970904827118, -0.2684813439846039, -0.057541657239198685, -0.01024569384753704, -0.08128205686807632, 0.217015340924263, 0.3796232342720032, 0.48145201802253723, -0.08194994181394577, -0.2124663144350052, -0.10747615993022919, -0.10784565657377243, 0.13241170346736908, 0.10593535751104355, 0.508838415145874, -0.305877149105072, -0.28809985518455505, -0.29904112219810486, 0.17958500981330872, 0.4593473970890045, -0.11360020935535431, 0.8708942532539368, 0.8587422966957092, -0.294158011674881, 0.01967925764620304, -0.4527977406978607, -0.0041935318149626255, -0.48334652185440063, 0.408812016248703, -0.3494485318660736, -0.8536501526832581, 0.9014227390289307, 0.1541222482919693, 0.16814132034778595, 0.8206892609596252, 0.571016788482666, 0.1106642335653305, 0.9031562209129333, 0.1984618902206421, -0.08349161595106125, 0.5669258832931519, -0.7308369278907776, -0.1301833689212799, -0.9704583287239075, -0.3697846531867981, -0.5590896010398865, -0.4002833366394043, -0.9138529300689697, -0.37821683287620544, 0.2503504753112793, 0.29232099652290344, -0.5368343591690063, 
0.35604697465896606, -0.6666848659515381, 0.2015947699546814, 0.6023775935173035, 0.19483515620231628, 0.11930368840694427, -0.03133711218833923, 0.004517555236816406, 0.21859967708587646, -0.6648087501525879, -0.41817712783813477, 1.3656829595565796, 0.2669863998889923, 0.5776810646057129, -0.1448299139738083, 1.0798619985580444, 0.31121039390563965, 0.3355708420276642, -0.5300835967063904, 0.5677855610847473, 0.07664118707180023, -0.5865458250045776, -0.25884830951690674, -0.6938647627830505, -1.0056136846542358, 0.24157829582691193, -0.13030697405338287, -0.9697920083999634, 0.1971624344587326, -0.07573779672384262, -0.049243245273828506, 0.259801983833313, -0.6472519040107727, 0.9895714521408081, -0.2341417521238327, -0.42489269375801086, 0.06602829694747925, -0.7843710780143738, 0.41833052039146423, 0.03273232281208038, 0.503204345703125, -0.31447744369506836, -0.012631515972316265, 1.1583963632583618, -0.6083469986915588, 0.7280911207199097, -0.2089063972234726, 0.09182535856962204, 0.452006459236145, -0.41768375039100647, 0.6624289155006409, 0.07499628514051437, -0.33488088846206665, 0.4391157627105713, 0.020137811079621315, -0.42938345670700073, -0.32613590359687805, 0.9086527228355408, -0.9091504812240601, -0.24478916823863983, -0.4608660042285919, -0.6062056422233582, 0.22318145632743835, 0.3234361410140991, 0.43638160824775696, 0.36883655190467834, 0.008340668864548206, 0.19697648286819458, 0.32884272933006287, -0.0996159166097641, 0.42944467067718506, 0.3265659511089325, -0.25548386573791504, -0.7425290942192078, 0.7171325087547302, 0.3059796988964081, 0.03273237869143486, 0.03843797370791435, 0.05309297889471054, -0.5294969081878662, -0.3806151747703552, -0.3787350356578827, 0.2698201835155487, -0.6334668397903442, -0.3517107665538788, -0.5353333353996277, -0.23831284046173096, -0.435073584318161, -0.04327601566910744, -0.453389972448349, -0.543243944644928, -0.3995244801044464, -0.11696723848581314, 0.6267257928848267, 0.5019353032112122, -0.28943854570388794, 0.2790478467941284, -0.8896984457969666, 0.21981965005397797, -0.1734866499900818, 0.4463382661342621, -0.10718175023794174, -0.5693604946136475, -0.4749806523323059, 0.25131916999816895, -0.38587674498558044, -0.8710796236991882, 0.5374242067337036, -0.018197542056441307, 0.7282431125640869, 0.14335019886493683, 0.24783088266849518, 0.6606208682060242, -0.23351185023784637, 0.9049651026725769, -0.003073861123993993, -0.6373575925827026, 0.6958965063095093, -0.2988547384738922, 0.21585072576999664, 0.5704514980316162, 0.2064882516860962, -0.4694274365901947, -0.279049813747406, -1.0027148723602295, -1.099245548248291, 1.0614314079284668, 0.5810091495513916, -0.36860156059265137, 0.11516548693180084, 0.3335537314414978, -0.05405528470873833, 0.12786279618740082, -0.7025374174118042, -0.727512776851654, -0.10062367469072342, -0.3748653829097748, -0.13818378746509552, -0.04400366172194481, -0.4536169767379761, -0.401263952255249, 0.9268926382064819, -0.03453422337770462, 0.3801676630973816, 0.24799498915672302, -0.059955596923828125, 0.013055815361440182, 0.36497169733047485, 0.38689538836479187, 0.6419439315795898, -0.27333319187164307, -0.1935798078775406, 0.37171152234077454, -0.5289443731307983, -0.024471409618854523, 0.4914442002773285, -0.09862876683473587, -0.08539459109306335, 0.5245602130889893, 0.9343896508216858, 0.08212501555681229, -0.5087283849716187, 0.552421510219574, 0.021173708140850067, -0.35642510652542114, -0.4820556640625, 0.14701896905899048, -0.05996304750442505, 0.5683834552764893, 
0.3629930317401886, -0.052153076976537704, 0.18300621211528778, -0.2843094766139984, 0.24888679385185242, 0.16480937600135803, -0.18062111735343933, -0.22781431674957275, 0.566608190536499, -0.10030053555965424, -0.24718965590000153, 0.7707773447036743, -0.20512059330940247, -0.5792725086212158, 1.1061456203460693, 0.2790646553039551, 0.8687313795089722, 0.020583761855959892, 0.08814536780118942, 0.7137055397033691, 0.2635088860988617, -0.24722039699554443, 0.6441234946250916, 0.06773004680871964, -0.5836365222930908, -0.2857530117034912, -0.7761664986610413, -0.049392301589250565, 0.21832525730133057, -1.015000581741333, 0.39816805720329285, 0.017184458673000336, -0.3233107030391693, -0.21858668327331543, 0.4669072926044464, -0.9248904585838318, 0.1466696709394455, 0.03308882564306259, 0.8523541688919067, -1.0344957113265991, 0.5566180944442749, 0.7908998131752014, -0.427724152803421, -0.9790814518928528, -0.34434255957603455, -0.05825713649392128, -0.709342360496521, 0.4077164828777313, 0.3567657768726349, 0.4671337306499481, -0.14344938099384308, -0.7454045414924622, -1.003108263015747, 1.5759968757629395, 0.14071132242679596, -0.61482834815979, 0.2636157274246216, 0.14240774512290955, 0.28376156091690063, -0.28879791498184204, 0.537142813205719, 0.6658559441566467, 0.7633001804351807, -0.09655988216400146, -0.8776937127113342, 0.2966572940349579, -0.473148375749588, -0.11054207384586334, 0.2973721921443939, -0.9185559153556824, 0.924811065196991, -0.19469980895519257, -0.07913284003734589, 0.02509845606982708, 0.4888269305229187, 0.6894106268882751, 0.5129202008247375, 0.5450453758239746, 0.7240167260169983, 0.7573153972625732, -0.38274064660072327, 1.0460004806518555, -0.27126210927963257, 0.8881076574325562, 1.0770459175109863, 0.0862269476056099, 0.6856387257575989, 0.3291070759296417, -0.40566036105155945, 0.4251212477684021, 0.8710651397705078, -0.37215614318847656, 0.35344597697257996, 0.20554430782794952, 0.013526312075555325, -0.1539570391178131, 0.022261109203100204, -0.5089702010154724, 0.37596210837364197, 0.16686974465847015, -0.44179099798202515, -0.07733198255300522, -0.1786344200372696, 0.11177822947502136, -0.28896769881248474, -0.2903401553630829, 0.5648008584976196, -0.0037181891966611147, -0.5228468179702759, 0.7636443972587585, -0.07793274521827698, 0.6613597273826599, -0.6141219139099121, -0.10979843884706497, -0.20164726674556732, 0.2129584550857544, -0.5312337279319763, -0.983331561088562, 0.1892874836921692, 0.015435556881129742, -0.2783942222595215, -0.11356855183839798, 0.7120165824890137, -0.3653887212276459, -0.6490981578826904, 0.36470869183540344, 0.4025525748729706, 0.34484297037124634, 0.05497991666197777, -0.9702755212783813, 0.28772857785224915, 0.228114053606987, -0.7972880601882935, 0.36234423518180847, 0.4838927686214447, -0.020171821117401123, 0.5543829798698425, 0.6626174449920654, -0.011230104602873325, 0.17505832016468048, -0.001730203628540039, 1.0759330987930298, -0.7539182305335999, -0.4119260013103485, -0.681071400642395, 0.9156206846237183, -0.3088454306125641, -0.5648398995399475, 0.8065860867500305, 1.0050263404846191, 0.8385875821113586, 0.10612751543521881, 0.7702524065971375, -0.47752079367637634, 0.3919057548046112, -0.3814224302768707, 0.8691231608390808, -0.8128448724746704, 0.231275737285614, -0.18435925245285034, -0.8744716644287109, 0.04678288474678993, 0.5337626934051514, -0.21542933583259583, 0.039859551936388016, 0.4682704508304596, 0.9879444241523743, 0.035776376724243164, 0.10206740349531174, -0.0002111782960128039, 
0.46918147802352905, 0.15931262075901031, 0.5968150496482849, 0.6730560064315796, -0.8296231031417847, 0.5235949754714966, -0.6834295392036438, -0.44247305393218994, -0.17048455774784088, -0.6494618058204651, -0.7219412922859192, -0.5772501826286316, -0.32035478949546814, -0.6011173129081726, -0.049277715384960175, 0.9137322306632996, 0.44138088822364807, -0.8478575944900513, -0.3712688088417053, 0.06036609411239624, 0.2136622816324234, -0.19202744960784912, -0.3765806257724762, 0.5688340067863464, -0.19560888409614563, -0.7998600006103516, 0.18120348453521729, -0.26038503646850586, -0.21760712563991547, 0.1039169579744339, -0.3053523302078247, -0.34012290835380554, -0.2797342836856842, 0.43553626537323, 0.15902511775493622, -0.5990914106369019, -0.24265769124031067, -0.07127177715301514, -0.08480421453714371, 0.19900129735469818, 0.25120800733566284, -0.5296164155006409, 0.07112641632556915, 0.6767212748527527, 0.2564464807510376, 0.6917034387588501, -0.026677342131733894, 0.18812786042690277, -0.6109038591384888, 0.05302533879876137, 0.046443644911050797, 0.5535865426063538, 0.1506165862083435, -0.49592143297195435, 0.9496302604675293, 0.33486437797546387, -0.8550028800964355, -0.8959053754806519, -0.21870939433574677, -1.25126314163208, 0.0397585965692997, 1.4225199222564697, -0.19004453718662262, -0.3952954411506653, 0.12576645612716675, -0.23586665093898773, 0.4068174660205841, -0.6940689086914062, 0.47012558579444885, 0.7131357192993164, -0.2543187737464905, 0.030469348654150963, -0.727114200592041, 0.32309579849243164, 0.08081290870904922, -0.9035791754722595, -0.06587887555360794, 0.3466956913471222, 0.4245966076850891, 0.12347596138715744, 0.7406293153762817, -0.08919838815927505, -0.07872273027896881, 0.026094209402799606, 0.2550607919692993, -0.10639358311891556, -0.003376702079549432, -0.22640517354011536, 0.06972319632768631, -0.3520325720310211, -0.4291999340057373 ]
open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Shinen
open-llm-leaderboard
2023-10-21T16:39:09Z
200
0
[ "region:us" ]
null
2023-08-18T11:47:38Z
--- pretty_name: Evaluation run of KoboldAI/GPT-J-6B-Shinen dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [KoboldAI/GPT-J-6B-Shinen](https://huggingface.co/KoboldAI/GPT-J-6B-Shinen) on\ \ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Shinen\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T16:38:56.875450](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Shinen/blob/main/results_2023-10-21T16-38-56.875450.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n\ \ \"em_stderr\": 0.00033145814652192477,\n \"f1\": 0.047103607382550344,\n\ \ \"f1_stderr\": 0.001175475504491836,\n \"acc\": 0.330297940428669,\n\ \ \"acc_stderr\": 0.00865604909042797\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.00033145814652192477,\n\ \ \"f1\": 0.047103607382550344,\n \"f1_stderr\": 0.001175475504491836\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.019711902956785442,\n \ \ \"acc_stderr\": 0.0038289829787357113\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6408839779005525,\n \"acc_stderr\": 0.013483115202120229\n\ \ }\n}\n```" repo_url: https://huggingface.co/KoboldAI/GPT-J-6B-Shinen leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|arc:challenge|25_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T15:56:59.519326.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T16_38_56.875450 path: - '**/details_harness|drop|3_2023-10-21T16-38-56.875450.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T16-38-56.875450.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T16_38_56.875450 path: - '**/details_harness|gsm8k|5_2023-10-21T16-38-56.875450.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T16-38-56.875450.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hellaswag|10_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:56:59.519326.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:56:59.519326.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:56:59.519326.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:56:59.519326.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T15_56_59.519326 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:56:59.519326.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:56:59.519326.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T15_56_59.519326 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T15:56:59.519326.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T15:56:59.519326.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T16_38_56.875450 path: - '**/details_harness|winogrande|5_2023-10-21T16-38-56.875450.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T16-38-56.875450.parquet' - config_name: results data_files: - split: 2023_07_19T15_56_59.519326 path: - results_2023-07-19T15:56:59.519326.parquet - split: 2023_10_21T16_38_56.875450 path: - results_2023-10-21T16-38-56.875450.parquet - split: latest path: - results_2023-10-21T16-38-56.875450.parquet ---

# Dataset Card for Evaluation run of KoboldAI/GPT-J-6B-Shinen

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/KoboldAI/GPT-J-6B-Shinen
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [KoboldAI/GPT-J-6B-Shinen](https://huggingface.co/KoboldAI/GPT-J-6B-Shinen) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Shinen",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-21T16:38:56.875450](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Shinen/blob/main/results_2023-10-21T16-38-56.875450.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You can find each in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0010486577181208054,
        "em_stderr": 0.00033145814652192477,
        "f1": 0.047103607382550344,
        "f1_stderr": 0.001175475504491836,
        "acc": 0.330297940428669,
        "acc_stderr": 0.00865604909042797
    },
    "harness|drop|3": {
        "em": 0.0010486577181208054,
        "em_stderr": 0.00033145814652192477,
        "f1": 0.047103607382550344,
        "f1_stderr": 0.001175475504491836
    },
    "harness|gsm8k|5": {
        "acc": 0.019711902956785442,
        "acc_stderr": 0.0038289829787357113
    },
    "harness|winogrande|5": {
        "acc": 0.6408839779005525,
        "acc_stderr": 0.013483115202120229
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
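In addition to the `harness_winogrande_5` example in the Dataset Summary above, the aggregated "results" configuration and the per-task configurations declared in this card's metadata can be loaded the same way. The sketch below assumes the `datasets` library is installed; the repository id, config names, and split names are taken from the YAML above, while the variable names are only illustrative.

```python
from datasets import load_dataset

# Aggregated metrics for the latest run (the "results" config declared above).
results = load_dataset(
    "open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Shinen",
    "results",
    split="latest",
)

# Per-sample details for one task, e.g. the DROP run listed in the configs.
drop_details = load_dataset(
    "open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Shinen",
    "harness_drop_3",
    split="latest",
)

print(results[0].keys())   # columns of the aggregated results row
print(len(drop_details))   # number of evaluated DROP examples
```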
[ -0.4076618552207947, -0.6886895895004272, 0.2692546248435974, 0.16727085411548615, -0.22107569873332977, 0.04533560201525688, -0.4408663809299469, -0.14088812470436096, 0.36893922090530396, 0.6030100584030151, -0.6546779870986938, -0.8918060660362244, -0.7221741080284119, 0.14983640611171722, -0.08466359972953796, 1.2166723012924194, -0.23213568329811096, -0.26670071482658386, 0.11194447427988052, -0.2593629062175751, -0.37993916869163513, -0.43261197209358215, -0.5605586767196655, -0.4566993713378906, 0.510463297367096, 0.5092124938964844, 0.43610450625419617, 0.6446617245674133, 0.6535320281982422, 0.39549800753593445, -0.12073081731796265, 0.12296253442764282, -0.4286348819732666, -0.1063324511051178, 0.2229081094264984, -0.6749976277351379, -0.6637146472930908, 0.08854088187217712, 0.6890709400177002, 0.4295389652252197, -0.0525311604142189, 0.5682639479637146, 0.13657335937023163, 0.578717827796936, -0.5094329118728638, 0.5152681469917297, -0.32826676964759827, -0.0830511599779129, -0.4180663526058197, -0.08574768155813217, -0.091633640229702, -0.3628038465976715, -0.1903434693813324, -0.5397212505340576, 0.24150431156158447, 0.006751562934368849, 1.078177571296692, 0.13366800546646118, -0.2079073041677475, -0.25378331542015076, -0.31308576464653015, 0.8367328643798828, -0.9174337983131409, -0.06848654896020889, 0.5832819938659668, 0.1384103000164032, -0.2416638433933258, -0.6236921548843384, -0.3618515431880951, -0.06163471192121506, -0.32977911829948425, 0.30953866243362427, 0.15264733135700226, -0.06947600096464157, 0.49170997738838196, 0.6414066553115845, -0.7175149321556091, -0.044526390731334686, -0.5622116327285767, -0.15945126116275787, 0.9934697151184082, 0.32619038224220276, 0.09823104739189148, -0.5810840129852295, -0.2774292528629303, -0.40606221556663513, -0.33373838663101196, 0.19419488310813904, 0.5451419353485107, 0.538736879825592, -0.564188539981842, 0.7480398416519165, -0.3693753778934479, 0.5132296085357666, -0.07681473344564438, -0.2582661509513855, 0.8626267910003662, -0.6511268615722656, -0.24541449546813965, 0.04708459600806236, 1.0847992897033691, 0.3778585195541382, 0.049614038318395615, 0.18746453523635864, -0.2642851173877716, 0.03761628642678261, 0.09922263771295547, -0.7954779267311096, -0.2326117306947708, 0.3701207637786865, -0.574083685874939, -0.4359521269798279, 0.29276159405708313, -0.9444647431373596, -0.20271989703178406, -0.3160637319087982, 0.3107631802558899, -0.19622701406478882, -0.42678165435791016, -0.18432100117206573, -0.16740119457244873, 0.33495500683784485, 0.06876417994499207, -0.7150887846946716, 0.2807413637638092, 0.5942592620849609, 0.9682853817939758, -0.08930587023496628, -0.32124051451683044, -0.25364455580711365, -0.24229584634304047, -0.18604804575443268, 0.37593936920166016, -0.1968488246202469, -0.48977625370025635, -0.157582625746727, 0.440408319234848, -0.4229295551776886, -0.5380365252494812, 0.78166663646698, -0.2682264745235443, 0.20201104879379272, -0.3151853084564209, -0.4846269190311432, -0.11764668673276901, 0.31545713543891907, -0.6817435622215271, 1.3750113248825073, 0.3927445113658905, -0.9580053687095642, 0.18195801973342896, -0.8825287818908691, -0.21125128865242004, 0.1232958659529686, -0.00613506929948926, -0.6897188425064087, -0.1275765597820282, 0.2037392407655716, 0.5272653102874756, -0.14278241991996765, 0.14898179471492767, -0.2856408953666687, -0.44395527243614197, 0.12729310989379883, -0.13864898681640625, 1.056512475013733, 0.26271340250968933, -0.4972555935382843, 0.1380854845046997, 
-0.937347948551178, 0.18724890053272247, 0.38186025619506836, -0.44668203592300415, -0.22520539164543152, -0.3204137682914734, 0.18834926187992096, 0.12955544888973236, 0.5670391917228699, -0.5182887315750122, 0.38863152265548706, -0.25955814123153687, 0.33896228671073914, 0.9692869186401367, 0.03098924830555916, 0.24973826110363007, -0.4007304608821869, 0.4870748221874237, 0.024383774027228355, 0.36312276124954224, -0.006469217594712973, -0.6050448417663574, -0.7913721203804016, -0.1952807605266571, 0.23079170286655426, 0.7429637312889099, -0.591206967830658, 0.761676549911499, -0.4249958395957947, -0.6914454698562622, -0.7232896089553833, 0.06904307752847672, 0.417231947183609, 0.515791654586792, 0.3095969259738922, -0.22402051091194153, -0.695706844329834, -1.0561219453811646, -0.07315871119499207, -0.13497060537338257, 0.02664552442729473, 0.48200032114982605, 1.0640954971313477, -0.3101899325847626, 0.677636981010437, -0.6092925667762756, -0.2681087255477905, -0.31729283928871155, 0.07152236998081207, 0.8932386636734009, 0.47788724303245544, 0.4235928952693939, -0.7595752477645874, -0.4372154474258423, 0.06847316771745682, -0.8455067276954651, -0.23097045719623566, -0.15915434062480927, -0.2116478681564331, 0.34037378430366516, -0.13352757692337036, -0.5337112545967102, 0.5834312438964844, 0.49659791588783264, -0.5806882977485657, 0.6369171142578125, -0.08236093819141388, 0.5243493318557739, -1.244502305984497, 0.21309788525104523, 0.03525242209434509, -0.0036534450482577085, -0.4661727249622345, -0.11421377956867218, 0.07239271700382233, 0.2901364862918854, -0.3505549728870392, 0.6284139156341553, -0.3706541359424591, -0.2115718424320221, -0.012973552569746971, 0.1612820029258728, -0.15182223916053772, 0.5711969137191772, -0.31987476348876953, 0.8460540771484375, 0.5114063620567322, -0.342357337474823, 0.4395267963409424, 0.4415957033634186, -0.5163009762763977, 0.2251710593700409, -0.5418832898139954, -0.015491512604057789, 0.15911726653575897, 0.1537487506866455, -0.875951886177063, -0.4754525125026703, 0.5478023886680603, -0.5618189573287964, 0.25903138518333435, -0.42819222807884216, -0.6359489560127258, -0.5333430767059326, -0.44202736020088196, 0.19143471121788025, 0.5971322655677795, -0.4810045659542084, 0.3121006190776825, 0.31304988265037537, -0.14083024859428406, -0.542178750038147, -0.7149322628974915, -0.13519690930843353, -0.4118909239768982, -0.6891471743583679, 0.3633427023887634, -0.14762909710407257, -0.20143795013427734, -0.06419770419597626, -0.006315172184258699, 0.005267409607768059, 0.08340930938720703, 0.36120307445526123, 0.6007986664772034, -0.06171015277504921, -0.3702726364135742, -0.2156812846660614, -0.22536517679691315, 0.13461822271347046, 0.07757128775119781, 0.5158211588859558, -0.295791357755661, -0.2989024221897125, -0.2486376315355301, 0.11828670650720596, 0.46856898069381714, -0.06269778311252594, 0.833216667175293, 0.7861365675926208, -0.20649699866771698, 0.07031881809234619, -0.4008823335170746, 0.05421478673815727, -0.48919299244880676, 0.3643836975097656, -0.33319422602653503, -0.8074198365211487, 0.9010978937149048, 0.18641918897628784, 0.1607014238834381, 0.7845725417137146, 0.6142423748970032, 0.14877668023109436, 0.9093513488769531, 0.2126266211271286, -0.19681192934513092, 0.5589779615402222, -0.739891767501831, -0.006909285672008991, -1.0476033687591553, -0.3796265721321106, -0.4305185377597809, -0.4047141373157501, -0.9277896881103516, -0.2709354758262634, 0.3110993206501007, 0.15657326579093933, -0.44559207558631897, 
0.45381084084510803, -0.7343414425849915, 0.25151315331459045, 0.7158205509185791, 0.2216644287109375, 0.015269466675817966, -0.06997131556272507, -0.07099740952253342, 0.14837197959423065, -0.5334466695785522, -0.3746245205402374, 1.3738347291946411, 0.23242877423763275, 0.5875620245933533, -0.11490333080291748, 1.0314303636550903, 0.29681938886642456, 0.34400251507759094, -0.47843194007873535, 0.5750318169593811, -0.07741354405879974, -0.5245370864868164, -0.18547669053077698, -0.6758485436439514, -0.9516629576683044, 0.24499240517616272, 0.06886626034975052, -0.9656680822372437, 0.058350980281829834, 0.006057989317923784, -0.005699879955500364, 0.361611545085907, -0.7141115665435791, 0.9494147300720215, -0.284376859664917, -0.40288129448890686, 0.12894034385681152, -0.8771846294403076, 0.43657177686691284, 0.08069033920764923, 0.3866383731365204, -0.15975454449653625, 0.10122531652450562, 1.126768708229065, -0.6172413229942322, 0.7639296054840088, -0.29531046748161316, 0.10374025255441666, 0.4375096559524536, -0.47633126378059387, 0.6933672428131104, -0.029792966321110725, -0.16884617507457733, 0.38503581285476685, -0.13448597490787506, -0.37433695793151855, -0.35045021772384644, 0.9028061032295227, -0.9141061902046204, -0.277933269739151, -0.4799283444881439, -0.5567061901092529, 0.25553491711616516, 0.2795571982860565, 0.43391913175582886, 0.31323370337486267, 0.1412123143672943, 0.2128278762102127, 0.33949440717697144, -0.09562288969755173, 0.5239931344985962, 0.25878146290779114, -0.2132045179605484, -0.8438698053359985, 0.779949426651001, 0.2914331555366516, 0.07216359674930573, 0.09032350033521652, 0.04739135131239891, -0.5848349332809448, -0.40148213505744934, -0.4302309453487396, 0.3100985586643219, -0.5987334251403809, -0.2537207007408142, -0.35135188698768616, -0.34039148688316345, -0.43759438395500183, -0.051117148250341415, -0.39407023787498474, -0.37678253650665283, -0.3025311231613159, -0.18114429712295532, 0.642560601234436, 0.5795994400978088, -0.24040007591247559, 0.32975995540618896, -0.7879484295845032, 0.21415969729423523, -0.1664591133594513, 0.46059074997901917, -0.21320496499538422, -0.5711455941200256, -0.4606214463710785, 0.20296332240104675, -0.3758809268474579, -0.8564251661300659, 0.5685665011405945, -0.051068976521492004, 0.7014375329017639, 0.12116213887929916, 0.11927325278520584, 0.7439389824867249, -0.19544945657253265, 1.0636595487594604, -0.0060426597483456135, -0.6461611986160278, 0.8260193467140198, -0.3925236463546753, 0.24589209258556366, 0.5297102928161621, 0.138229101896286, -0.4499962627887726, -0.30834901332855225, -1.002084493637085, -1.168095588684082, 1.0019906759262085, 0.6506556272506714, -0.3633464574813843, 0.12010771036148071, 0.3813445568084717, -0.05634469911456108, 0.13996145129203796, -0.6945275664329529, -0.7718546390533447, -0.22716501355171204, -0.3759155571460724, -0.13972923159599304, -0.012188035063445568, -0.4105803370475769, -0.3540339171886444, 0.9214659333229065, -0.0495433434844017, 0.5096303820610046, 0.2357606291770935, -0.06648949533700943, 0.045361846685409546, 0.3329402208328247, 0.45985379815101624, 0.6721332669258118, -0.3267378807067871, -0.1407168209552765, 0.2899315059185028, -0.6941503286361694, -0.020528363063931465, 0.4205910861492157, -0.14645704627037048, -0.053420402109622955, 0.5669071078300476, 0.9831585884094238, 0.07545018196105957, -0.30763813853263855, 0.4801000952720642, 0.05765180289745331, -0.3763114809989929, -0.49847716093063354, 0.20603835582733154, -0.08610254526138306, 0.3995616137981415, 
0.3981390595436096, -0.10839176177978516, 0.007240673527121544, -0.24109840393066406, 0.23054610192775726, 0.19028164446353912, -0.07256686687469482, -0.30050739645957947, 0.6050028204917908, -0.09171374887228012, -0.29005691409111023, 0.7839896082878113, -0.1447921097278595, -0.605275571346283, 1.0726923942565918, 0.34443965554237366, 0.9366554021835327, -0.07652466744184494, 0.07990837097167969, 0.6244499087333679, 0.29850855469703674, -0.1681203991174698, 0.587495744228363, 0.11332613229751587, -0.5122554898262024, -0.31554776430130005, -0.7649020552635193, -0.19635017216205597, 0.4007394015789032, -0.9912328720092773, 0.30611491203308105, -0.09098955988883972, -0.32043424248695374, -0.18338634073734283, 0.47870901226997375, -0.8647559285163879, 0.17004163563251495, 0.04159979522228241, 0.790978729724884, -1.0613113641738892, 0.5932573676109314, 0.804939329624176, -0.5449769496917725, -0.8883224129676819, -0.35735973715782166, -0.026361515745520592, -0.7696413397789001, 0.3732418119907379, 0.33527228236198425, 0.5128942131996155, -0.1585739105939865, -0.6880118250846863, -0.9828424453735352, 1.5639973878860474, 0.0837496966123581, -0.5449538826942444, 0.20458708703517914, 0.17418138682842255, 0.37214457988739014, -0.2516026198863983, 0.5912582278251648, 0.7639485001564026, 0.8273298740386963, -0.0708601102232933, -0.9301819801330566, 0.22268781065940857, -0.5591441988945007, -0.009189960546791553, 0.38920044898986816, -0.8928314447402954, 0.9500251412391663, -0.07377239316701889, 0.022244516760110855, 0.009680639021098614, 0.3672178387641907, 0.6100959777832031, 0.3656049966812134, 0.4610235393047333, 0.7113332152366638, 0.6768038868904114, -0.49046018719673157, 1.0734912157058716, -0.32507041096687317, 0.9234067797660828, 1.0142171382904053, 0.03376706317067146, 0.61639004945755, 0.32177218794822693, -0.523011326789856, 0.4731413424015045, 0.8868880271911621, -0.46570709347724915, 0.46452295780181885, 0.13042797148227692, -0.1115981861948967, -0.06679961830377579, 0.11073415726423264, -0.5040345191955566, 0.29789143800735474, 0.23159489035606384, -0.5981849431991577, -0.12883444130420685, -0.23568201065063477, 0.12753161787986755, -0.3463729918003082, -0.31174445152282715, 0.596954882144928, -0.026151765137910843, -0.5777479410171509, 0.6587079167366028, -0.1482941061258316, 0.6785506010055542, -0.755710780620575, -0.1142556294798851, -0.1920270174741745, 0.31461015343666077, -0.49561113119125366, -1.0045461654663086, 0.1983235627412796, 0.04172621667385101, -0.2703322172164917, -0.18522146344184875, 0.7805436849594116, -0.3429974913597107, -0.6583673357963562, 0.3815114200115204, 0.33347100019454956, 0.36730989813804626, 0.11603621393442154, -0.9346293807029724, 0.24742810428142548, 0.26627713441848755, -0.7894914150238037, 0.476259708404541, 0.2961156666278839, 0.05616919323801994, 0.531167209148407, 0.7312301993370056, 0.09353742003440857, 0.14863930642604828, 0.0865853801369667, 1.0690380334854126, -0.7629532217979431, -0.4451991319656372, -0.8166596293449402, 0.9136455655097961, -0.3317824900150299, -0.5877875089645386, 0.8408194780349731, 0.9480502605438232, 0.8050459027290344, 0.05571127310395241, 0.9664787650108337, -0.4930906593799591, 0.443408727645874, -0.38849812746047974, 0.8715506792068481, -0.6262343525886536, 0.18701504170894623, -0.250843346118927, -0.8444116115570068, -0.03166426718235016, 0.689740777015686, -0.30414560437202454, 0.15183404088020325, 0.5127021074295044, 0.9507042765617371, 0.021212603896856308, 0.17161260545253754, -0.005531116388738155, 
0.44017890095710754, 0.3081226646900177, 0.6470052003860474, 0.5721693634986877, -0.7818007469177246, 0.5081907510757446, -0.6597886681556702, -0.4529317021369934, -0.149275541305542, -0.6686646342277527, -0.7839027047157288, -0.5096662640571594, -0.3366243839263916, -0.5116216540336609, 0.039900023490190506, 0.896298885345459, 0.445107638835907, -0.8764437437057495, -0.2777307629585266, -0.0685952827334404, 0.1731194704771042, -0.1929934024810791, -0.38087791204452515, 0.6488277912139893, -0.20851847529411316, -0.7874113917350769, 0.2719312310218811, -0.15333126485347748, -0.16444803774356842, 0.02831128053367138, -0.23993468284606934, -0.281534343957901, -0.3753068745136261, 0.408188134431839, 0.15225806832313538, -0.6950790286064148, -0.364743173122406, -0.13913315534591675, -0.010663283057510853, 0.24199266731739044, 0.3236880898475647, -0.5830637812614441, 0.12232135981321335, 0.5626646876335144, 0.28728047013282776, 0.6823598146438599, 0.06182153522968292, 0.17064398527145386, -0.7012386918067932, -0.06547467410564423, -0.027234500274062157, 0.5088135004043579, 0.18265783786773682, -0.4639315903186798, 0.9771275520324707, 0.4313373267650604, -0.8768251538276672, -0.7914815545082092, -0.21294726431369781, -1.1679760217666626, 0.09790662676095963, 1.374917984008789, -0.3174014985561371, -0.40860602259635925, 0.09156840294599533, -0.2258302867412567, 0.4043348431587219, -0.6316380500793457, 0.5341912508010864, 0.763190746307373, -0.3107677400112152, -0.041991136968135834, -0.7311911582946777, 0.2783542275428772, 0.012081753462553024, -0.9261025786399841, -0.007896884344518185, 0.25840088725090027, 0.4199734330177307, 0.2642104923725128, 0.6445507407188416, -0.06560147553682327, -0.10811556875705719, 0.08106700330972672, 0.2039465755224228, -0.18464840948581696, -0.028752949088811874, -0.21704845130443573, 0.05551183596253395, -0.46433958411216736, -0.50225430727005 ]
open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Janeway
open-llm-leaderboard
2023-10-21T15:51:48Z
200
0
[ "region:us" ]
null
2023-08-18T11:48:21Z
--- pretty_name: Evaluation run of KoboldAI/GPT-J-6B-Janeway dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [KoboldAI/GPT-J-6B-Janeway](https://huggingface.co/KoboldAI/GPT-J-6B-Janeway)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Janeway\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T15:51:36.283517](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Janeway/blob/main/results_2023-10-21T15-51-36.283517.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001153523489932886,\n\ \ \"em_stderr\": 0.0003476179896857095,\n \"f1\": 0.04762374161073833,\n\ \ \"f1_stderr\": 0.001208940406482686,\n \"acc\": 0.33042240390432354,\n\ \ \"acc_stderr\": 0.008312737588634883\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.001153523489932886,\n \"em_stderr\": 0.0003476179896857095,\n\ \ \"f1\": 0.04762374161073833,\n \"f1_stderr\": 0.001208940406482686\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.013646702047005308,\n \ \ \"acc_stderr\": 0.0031957470754808088\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6471981057616417,\n \"acc_stderr\": 0.013429728101788958\n\ \ }\n}\n```" repo_url: https://huggingface.co/KoboldAI/GPT-J-6B-Janeway leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|arc:challenge|25_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T15:39:54.753616.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T15_51_36.283517 path: - '**/details_harness|drop|3_2023-10-21T15-51-36.283517.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T15-51-36.283517.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T15_51_36.283517 path: - '**/details_harness|gsm8k|5_2023-10-21T15-51-36.283517.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T15-51-36.283517.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hellaswag|10_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:39:54.753616.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:39:54.753616.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:39:54.753616.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:39:54.753616.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T15_39_54.753616 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:39:54.753616.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:39:54.753616.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T15_39_54.753616 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T15:39:54.753616.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T15:39:54.753616.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T15_51_36.283517 path: - '**/details_harness|winogrande|5_2023-10-21T15-51-36.283517.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T15-51-36.283517.parquet' - config_name: results data_files: - split: 2023_07_19T15_39_54.753616 path: - results_2023-07-19T15:39:54.753616.parquet - split: 2023_10_21T15_51_36.283517 path: - results_2023-10-21T15-51-36.283517.parquet - split: latest path: - results_2023-10-21T15-51-36.283517.parquet --- # Dataset Card for Evaluation run of KoboldAI/GPT-J-6B-Janeway ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/KoboldAI/GPT-J-6B-Janeway - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [KoboldAI/GPT-J-6B-Janeway](https://huggingface.co/KoboldAI/GPT-J-6B-Janeway) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Janeway", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T15:51:36.283517](https://huggingface.co/datasets/open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Janeway/blob/main/results_2023-10-21T15-51-36.283517.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857095, "f1": 0.04762374161073833, "f1_stderr": 0.001208940406482686, "acc": 0.33042240390432354, "acc_stderr": 0.008312737588634883 }, "harness|drop|3": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857095, "f1": 0.04762374161073833, "f1_stderr": 0.001208940406482686 }, "harness|gsm8k|5": { "acc": 0.013646702047005308, "acc_stderr": 0.0031957470754808088 }, "harness|winogrande|5": { "acc": 0.6471981057616417, "acc_stderr": 0.013429728101788958 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
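The card above already includes one `load_dataset` call; as a brief complement, here is a minimal sketch of loading the aggregated "results" config and a single task config from the same repository. The config and split names ("results", "harness_winogrande_5", "latest") are taken from the card's config list; the printed attributes are generic `datasets.Dataset` introspection, not a claim about the stored schema.

```python
from datasets import load_dataset

# Aggregated metrics of the most recent evaluation run; per the card, the
# "latest" split of the "results" config points at the newest results file.
results = load_dataset(
    "open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Janeway",
    "results",
    split="latest",
)

# Per-example details for one task, e.g. the 5-shot Winogrande config.
winogrande = load_dataset(
    "open-llm-leaderboard/details_KoboldAI__GPT-J-6B-Janeway",
    "harness_winogrande_5",
    split="latest",
)

print(results.column_names)   # columns of the aggregated results table
print(winogrande.num_rows)    # number of evaluated Winogrande examples
```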
[ -0.37808138132095337, -0.6864941120147705, 0.27811145782470703, 0.17402125895023346, -0.1906767636537552, 0.019900547340512276, -0.3823520243167877, -0.12380117177963257, 0.3072158992290497, 0.6010285019874573, -0.6661622524261475, -0.9037082195281982, -0.7308927178382874, 0.1647564321756363, -0.0749649778008461, 1.1660064458847046, -0.24072366952896118, -0.22625732421875, 0.07706305384635925, -0.2746164798736572, -0.3756865859031677, -0.45779648423194885, -0.5512518882751465, -0.43412840366363525, 0.5163002014160156, 0.49897825717926025, 0.39614617824554443, 0.6430802345275879, 0.6090958714485168, 0.4047682583332062, -0.14435608685016632, 0.15821845829486847, -0.41849061846733093, -0.1111515685915947, 0.21047967672348022, -0.6698744893074036, -0.6650611758232117, 0.09828273952007294, 0.7243463397026062, 0.4512707591056824, -0.06045766919851303, 0.5630983114242554, 0.13004626333713531, 0.5967158675193787, -0.5048444271087646, 0.4799632430076599, -0.33413413166999817, -0.085105761885643, -0.3834446370601654, -0.10978490859270096, -0.09493422508239746, -0.3628080189228058, -0.17869149148464203, -0.5627421140670776, 0.20685362815856934, 0.03021351806819439, 1.0618270635604858, 0.1281152367591858, -0.2603616714477539, -0.3045448660850525, -0.2827494442462921, 0.8161115646362305, -0.8906059861183167, -0.04206082969903946, 0.5751420855522156, 0.1327337622642517, -0.30618569254875183, -0.56941157579422, -0.3717067539691925, -0.09875664860010147, -0.3327822983264923, 0.26713481545448303, 0.11170564591884613, -0.08512752503156662, 0.4936771094799042, 0.5772958397865295, -0.7547248005867004, -0.03561002016067505, -0.6167131662368774, -0.14253848791122437, 0.9657822251319885, 0.3306049108505249, 0.07785750925540924, -0.5239622592926025, -0.269195020198822, -0.36761438846588135, -0.3439484238624573, 0.1931741088628769, 0.5651187896728516, 0.5497230291366577, -0.5905584096908569, 0.7619715332984924, -0.42871060967445374, 0.5519829988479614, -0.0867496058344841, -0.2696729004383087, 0.862406849861145, -0.5987058877944946, -0.26899129152297974, 0.010881306603550911, 1.0648245811462402, 0.3764922320842743, 0.03536157310009003, 0.1597985178232193, -0.2236613631248474, -0.009869467467069626, 0.09521284699440002, -0.7803689241409302, -0.21340171992778778, 0.4017103314399719, -0.578338086605072, -0.4390929043292999, 0.28436535596847534, -0.8901304006576538, -0.19964244961738586, -0.3133834898471832, 0.2954823076725006, -0.1770198941230774, -0.4523472785949707, -0.17151600122451782, -0.19241859018802643, 0.3137619197368622, 0.09674306213855743, -0.6886876225471497, 0.24256137013435364, 0.571849524974823, 0.9697788953781128, -0.05859239399433136, -0.32745125889778137, -0.2917461097240448, -0.2116450071334839, -0.17482104897499084, 0.36632949113845825, -0.19128234684467316, -0.5318378210067749, -0.15368442237377167, 0.449340283870697, -0.38738057017326355, -0.5259037613868713, 0.7477433085441589, -0.25065475702285767, 0.2492910474538803, -0.30233845114707947, -0.4567119777202606, -0.111525759100914, 0.35504770278930664, -0.665928304195404, 1.3403418064117432, 0.40947678685188293, -0.9050774574279785, 0.1588556170463562, -0.8801103234291077, -0.191148579120636, 0.08335808664560318, 0.02333606779575348, -0.689276397228241, -0.12876413762569427, 0.17020413279533386, 0.5264683365821838, -0.17903758585453033, 0.10889497399330139, -0.31030380725860596, -0.4435083866119385, 0.12038996815681458, -0.1407451331615448, 1.1187944412231445, 0.24446797370910645, -0.4564278721809387, 0.10091348737478256, -0.9830478429794312, 
0.20974645018577576, 0.38559168577194214, -0.4831584393978119, -0.24939225614070892, -0.3467958867549896, 0.1637328565120697, 0.14008989930152893, 0.544683575630188, -0.511522114276886, 0.43732041120529175, -0.216799795627594, 0.31905999779701233, 0.9776870608329773, 0.007880214601755142, 0.308056503534317, -0.38043397665023804, 0.4848134219646454, 0.03170569986104965, 0.38272130489349365, 0.0393192395567894, -0.6001935005187988, -0.8074231743812561, -0.17551065981388092, 0.2288873940706253, 0.7294552326202393, -0.5752572417259216, 0.7329002618789673, -0.4100177586078644, -0.7440119385719299, -0.7234380841255188, 0.07796608656644821, 0.4396485984325409, 0.5069432258605957, 0.3248906433582306, -0.2391088455915451, -0.6372745633125305, -1.0277948379516602, -0.037615180015563965, -0.20434126257896423, 0.007425044197589159, 0.4599689245223999, 1.0174115896224976, -0.29984572529792786, 0.6269848942756653, -0.6250688433647156, -0.2548716366291046, -0.25506502389907837, 0.1254749298095703, 0.8696199655532837, 0.504126250743866, 0.4248058497905731, -0.7585139870643616, -0.40299803018569946, 0.04523780941963196, -0.8254129886627197, -0.2671273648738861, -0.17456728219985962, -0.24024240672588348, 0.37059691548347473, -0.12292132526636124, -0.5877148509025574, 0.570796549320221, 0.5353285670280457, -0.6245755553245544, 0.6224927306175232, -0.07421799004077911, 0.5070706009864807, -1.223793387413025, 0.19676655530929565, 0.013001691550016403, 0.01858031563460827, -0.4555839002132416, -0.13690555095672607, 0.07338394969701767, 0.3377922475337982, -0.37228527665138245, 0.6123902797698975, -0.4345838129520416, -0.2141113430261612, -0.0013459337642416358, 0.13293853402137756, -0.12548071146011353, 0.5664070844650269, -0.31533005833625793, 0.81691974401474, 0.4582887589931488, -0.33727362751960754, 0.3856472074985504, 0.43363478779792786, -0.46665114164352417, 0.2552768886089325, -0.5324382781982422, 0.018609115853905678, 0.14840763807296753, 0.16247838735580444, -0.880283772945404, -0.416058212518692, 0.5715163946151733, -0.6155201196670532, 0.2542895972728729, -0.4772203862667084, -0.6191115975379944, -0.48707717657089233, -0.39236801862716675, 0.16786332428455353, 0.5758635997772217, -0.44519779086112976, 0.35020169615745544, 0.3303346037864685, -0.11211585253477097, -0.5447059869766235, -0.672343909740448, -0.14385007321834564, -0.39832764863967896, -0.652913510799408, 0.3230195939540863, -0.14409346878528595, -0.20051582157611847, -0.028964035212993622, 0.015246344730257988, -0.0399986132979393, 0.11796534061431885, 0.37946218252182007, 0.61173015832901, -0.04649446904659271, -0.32864436507225037, -0.21541954576969147, -0.21236130595207214, 0.16278131306171417, 0.060824550688266754, 0.49445202946662903, -0.246176078915596, -0.30245763063430786, -0.2017238885164261, 0.18438778817653656, 0.470632404088974, -0.0658864825963974, 0.8199476599693298, 0.8182176351547241, -0.18576133251190186, 0.08412756770849228, -0.37203335762023926, 0.10894590616226196, -0.4817548394203186, 0.35265544056892395, -0.37861499190330505, -0.8579022884368896, 0.8208010792732239, 0.20229756832122803, 0.14505796134471893, 0.7713584899902344, 0.6407222151756287, 0.15675827860832214, 0.8924926519393921, 0.2046920508146286, -0.21034985780715942, 0.5285245180130005, -0.7159689664840698, -0.023016739636659622, -1.028627872467041, -0.36037948727607727, -0.4911404252052307, -0.33084675669670105, -0.966517448425293, -0.3348753750324249, 0.30149492621421814, 0.1744288057088852, -0.4708098769187927, 0.4569629728794098, -0.7736971378326416, 
0.25069132447242737, 0.7204927206039429, 0.17912577092647552, 0.03859527409076691, -0.09186714887619019, -0.05739383399486542, 0.21334931254386902, -0.5354253649711609, -0.39896756410598755, 1.3896501064300537, 0.24076656997203827, 0.585896909236908, -0.050228118896484375, 1.0052820444107056, 0.26676976680755615, 0.36294031143188477, -0.4724326431751251, 0.5482193231582642, -0.018913760781288147, -0.5460212230682373, -0.2029242366552353, -0.6659788489341736, -0.9231869578361511, 0.2440764307975769, -0.003455491503700614, -0.9945346713066101, 0.07287752628326416, 0.01573077216744423, 0.005765595473349094, 0.3312569558620453, -0.715607225894928, 0.9016134738922119, -0.25863412022590637, -0.40566229820251465, 0.148093119263649, -0.8607630133628845, 0.4400874376296997, 0.0845581591129303, 0.34351539611816406, -0.18078552186489105, 0.08354415744543076, 1.0660122632980347, -0.6070929765701294, 0.7691954970359802, -0.3121952712535858, 0.10906834155321121, 0.40959876775741577, -0.47954121232032776, 0.6895422339439392, -0.040874361991882324, -0.13479116559028625, 0.44894611835479736, -0.11789857596158981, -0.38564532995224, -0.3676661550998688, 0.8818216323852539, -0.926842451095581, -0.3200485408306122, -0.49144142866134644, -0.5577310919761658, 0.2464107871055603, 0.27320584654808044, 0.3917214572429657, 0.27659377455711365, 0.1395251303911209, 0.20028409361839294, 0.3156278431415558, -0.11064330488443375, 0.4667726457118988, 0.3172893822193146, -0.15917152166366577, -0.7972866296768188, 0.7471904158592224, 0.29656222462654114, 0.09152165800333023, 0.1340755671262741, 0.01884526200592518, -0.5902116894721985, -0.4404359757900238, -0.43941086530685425, 0.2928716540336609, -0.5895729064941406, -0.24824665486812592, -0.4182370901107788, -0.3274644613265991, -0.4643208682537079, -0.041524793952703476, -0.37931621074676514, -0.40031319856643677, -0.32582831382751465, -0.16460008919239044, 0.6102912425994873, 0.6144396662712097, -0.2556590437889099, 0.30766311287879944, -0.820978045463562, 0.15761631727218628, -0.14765332639217377, 0.3933483958244324, -0.2010968029499054, -0.5272677540779114, -0.45438152551651, 0.16782362759113312, -0.38952550292015076, -0.8439086079597473, 0.5625541806221008, -0.1297193020582199, 0.6944422721862793, 0.0756874680519104, 0.10998370498418808, 0.7681797742843628, -0.19958049058914185, 1.0334506034851074, -0.002288823015987873, -0.6588253974914551, 0.7660295963287354, -0.39742517471313477, 0.2598361372947693, 0.5014495849609375, 0.15916351974010468, -0.4537247121334076, -0.31298837065696716, -0.9631719589233398, -1.167553186416626, 0.9426990747451782, 0.6681380271911621, -0.3678737282752991, 0.1415441483259201, 0.33558690547943115, -0.0825888141989708, 0.13552212715148926, -0.6810023784637451, -0.7227076292037964, -0.1911603808403015, -0.38450947403907776, -0.10984064638614655, -0.020469000563025475, -0.4146091639995575, -0.36003294587135315, 0.9297217130661011, -0.05839735269546509, 0.5637480020523071, 0.2332456409931183, -0.05101030319929123, 0.03149945288896561, 0.34587031602859497, 0.4826968014240265, 0.7121762633323669, -0.3521811366081238, -0.18805676698684692, 0.3056398332118988, -0.6464130282402039, 0.03538474813103676, 0.44871941208839417, -0.13552924990653992, -0.030258262529969215, 0.6028891205787659, 0.9377409219741821, 0.018492668867111206, -0.3118601441383362, 0.4787036180496216, 0.02992592751979828, -0.4037744104862213, -0.5451991558074951, 0.1920531839132309, -0.038828834891319275, 0.43628790974617004, 0.39033135771751404, -0.15811358392238617, 
0.04852581396698952, -0.22981442511081696, 0.20218336582183838, 0.2248816192150116, -0.09347735345363617, -0.26033610105514526, 0.5985252261161804, -0.054305799305438995, -0.3031463027000427, 0.7759526968002319, -0.14156928658485413, -0.5880905389785767, 1.0951364040374756, 0.32376962900161743, 0.9437413215637207, -0.07344819605350494, 0.0665614977478981, 0.6192497611045837, 0.3339402377605438, -0.09080053120851517, 0.5524143576622009, 0.09704376012086868, -0.5291024446487427, -0.28815409541130066, -0.7497678995132446, -0.21889498829841614, 0.38526853919029236, -1.002342939376831, 0.31410661339759827, -0.08665703982114792, -0.2846449911594391, -0.16359053552150726, 0.443549245595932, -0.8894187808036804, 0.2030399590730667, 0.03993768244981766, 0.83595871925354, -1.054314136505127, 0.6448134779930115, 0.8054232001304626, -0.4989691972732544, -0.9175804257392883, -0.31170809268951416, 0.04090125113725662, -0.7899102568626404, 0.4198250472545624, 0.32652556896209717, 0.5018158555030823, -0.17269006371498108, -0.6368563175201416, -0.9842108488082886, 1.5857658386230469, 0.017676131799817085, -0.5889018177986145, 0.15586991608142853, 0.17139160633087158, 0.3406262695789337, -0.2767626941204071, 0.6211156845092773, 0.7424795031547546, 0.7900239825248718, -0.07145706564188004, -0.9017788171768188, 0.2478291094303131, -0.5584361553192139, 0.004101151134818792, 0.3861554265022278, -0.9044173359870911, 0.9562722444534302, -0.08659996092319489, 0.047081973403692245, 0.03873699903488159, 0.35314491391181946, 0.5925227999687195, 0.35618650913238525, 0.5328239798545837, 0.7068743109703064, 0.6565993428230286, -0.47237277030944824, 1.0527933835983276, -0.29172763228416443, 0.8970819711685181, 1.0471831560134888, 0.07258304208517075, 0.6253329515457153, 0.33962342143058777, -0.4977159798145294, 0.43999606370925903, 0.8769075274467468, -0.42395439743995667, 0.4650326073169708, 0.16230076551437378, -0.09152336418628693, -0.06909103691577911, 0.09403003007173538, -0.5025243759155273, 0.3504062294960022, 0.20362873375415802, -0.6348200440406799, -0.1114906519651413, -0.2855750024318695, 0.15976586937904358, -0.3660181760787964, -0.25940409302711487, 0.588572084903717, -0.05367828160524368, -0.5619001388549805, 0.6639211177825928, -0.15204793214797974, 0.6207490563392639, -0.7513607144355774, -0.1666140854358673, -0.16795864701271057, 0.27914178371429443, -0.43787914514541626, -0.9899287819862366, 0.17788533866405487, 0.0431242361664772, -0.25600573420524597, -0.15364386141300201, 0.7427290081977844, -0.3630177676677704, -0.6542522311210632, 0.391690194606781, 0.3556250333786011, 0.32805243134498596, 0.12054459005594254, -0.9519045352935791, 0.26332759857177734, 0.2790072560310364, -0.804943859577179, 0.4577281177043915, 0.3351444602012634, 0.05616764724254608, 0.5178689956665039, 0.7417191863059998, 0.10362638533115387, 0.09242280572652817, 0.03698710352182388, 1.1408640146255493, -0.7884306907653809, -0.4107440710067749, -0.8104966878890991, 0.9732427597045898, -0.36733466386795044, -0.6382852792739868, 0.8289772868156433, 0.9053504467010498, 0.8561816215515137, 0.055264852941036224, 0.9907298684120178, -0.5100163817405701, 0.4620817005634308, -0.424457311630249, 0.8538295030593872, -0.6467036604881287, 0.17791017889976501, -0.24162261188030243, -0.8238169550895691, -0.01707276701927185, 0.6785417199134827, -0.2768433690071106, 0.10667715966701508, 0.4966013729572296, 0.9933764934539795, 0.0685291588306427, 0.12617817521095276, 0.017226368188858032, 0.44870132207870483, 0.31495562195777893, 
0.6828500628471375, 0.587651252746582, -0.7605809569358826, 0.5231698751449585, -0.601233720779419, -0.48084092140197754, -0.1350860297679901, -0.666964590549469, -0.8317950963973999, -0.5370532274246216, -0.29125484824180603, -0.522808849811554, 0.11208948493003845, 0.9203718900680542, 0.45468202233314514, -0.8215411901473999, -0.27761751413345337, -0.02986392192542553, 0.18655423820018768, -0.16077551245689392, -0.36941102147102356, 0.5927138328552246, -0.17674200236797333, -0.8330947160720825, 0.24428673088550568, -0.13379564881324768, -0.12463097274303436, -0.024431241676211357, -0.2473866194486618, -0.3121170997619629, -0.38167059421539307, 0.4171660840511322, 0.14175939559936523, -0.6796872615814209, -0.38909029960632324, -0.1615636944770813, -0.028960803523659706, 0.2064095437526703, 0.3250357508659363, -0.5442234873771667, 0.14780789613723755, 0.5646544098854065, 0.2598637640476227, 0.660387396812439, 0.06288988143205643, 0.22670136392116547, -0.6599510312080383, -0.02351648174226284, -0.031029727309942245, 0.49755987524986267, 0.17984755337238312, -0.49936679005622864, 1.0124903917312622, 0.4363444149494171, -0.872311532497406, -0.7776941061019897, -0.22703878581523895, -1.1711312532424927, 0.05122198536992073, 1.3881667852401733, -0.35363882780075073, -0.44031980633735657, 0.060360368341207504, -0.22236649692058563, 0.3893769085407257, -0.6411563158035278, 0.5193646550178528, 0.7787154316902161, -0.2753145396709442, -0.04905867949128151, -0.7116990685462952, 0.2666480243206024, -0.016792450100183487, -0.9135765433311462, -0.035187236964702606, 0.298818975687027, 0.429046630859375, 0.2573535740375519, 0.7054323554039001, -0.05701654031872749, -0.13569015264511108, 0.09767895191907883, 0.20640146732330322, -0.21178725361824036, -0.029545631259679794, -0.18860648572444916, 0.0697949230670929, -0.4203474819660187, -0.5303723216056824 ]
open-llm-leaderboard/details_togethercomputer__Pythia-Chat-Base-7B
open-llm-leaderboard
2023-10-21T17:20:09Z
200
0
[ "region:us" ]
null
2023-08-18T11:49:14Z
--- pretty_name: Evaluation run of togethercomputer/Pythia-Chat-Base-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [togethercomputer/Pythia-Chat-Base-7B](https://huggingface.co/togethercomputer/Pythia-Chat-Base-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_togethercomputer__Pythia-Chat-Base-7B\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T17:19:57.992489](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__Pythia-Chat-Base-7B/blob/main/results_2023-10-21T17-19-57.992489.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n\ \ \"em_stderr\": 0.00033145814652192835,\n \"f1\": 0.04749370805369145,\n\ \ \"f1_stderr\": 0.0011837049377399571,\n \"acc\": 0.34051740902946614,\n\ \ \"acc_stderr\": 0.009473843193280568\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.00033145814652192835,\n\ \ \"f1\": 0.04749370805369145,\n \"f1_stderr\": 0.0011837049377399571\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.04094010614101592,\n \ \ \"acc_stderr\": 0.00545807679629435\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6400947119179163,\n \"acc_stderr\": 0.013489609590266786\n\ \ }\n}\n```" repo_url: https://huggingface.co/togethercomputer/Pythia-Chat-Base-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|arc:challenge|25_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T16:40:02.088273.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T17_19_57.992489 path: - '**/details_harness|drop|3_2023-10-21T17-19-57.992489.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T17-19-57.992489.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T17_19_57.992489 path: - '**/details_harness|gsm8k|5_2023-10-21T17-19-57.992489.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T17-19-57.992489.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hellaswag|10_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_5 
data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:40:02.088273.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:40:02.088273.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:40:02.088273.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:40:02.088273.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T16_40_02.088273 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:40:02.088273.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:40:02.088273.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T16_40_02.088273 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T16:40:02.088273.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T16:40:02.088273.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T17_19_57.992489 path: - '**/details_harness|winogrande|5_2023-10-21T17-19-57.992489.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T17-19-57.992489.parquet' - config_name: results data_files: - split: 2023_07_19T16_40_02.088273 path: - results_2023-07-19T16:40:02.088273.parquet - split: 2023_10_21T17_19_57.992489 path: - results_2023-10-21T17-19-57.992489.parquet - split: latest path: - results_2023-10-21T17-19-57.992489.parquet --- # Dataset Card for Evaluation run of togethercomputer/Pythia-Chat-Base-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/togethercomputer/Pythia-Chat-Base-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [togethercomputer/Pythia-Chat-Base-7B](https://huggingface.co/togethercomputer/Pythia-Chat-Base-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_togethercomputer__Pythia-Chat-Base-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T17:19:57.992489](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__Pythia-Chat-Base-7B/blob/main/results_2023-10-21T17-19-57.992489.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.00033145814652192835, "f1": 0.04749370805369145, "f1_stderr": 0.0011837049377399571, "acc": 0.34051740902946614, "acc_stderr": 0.009473843193280568 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.00033145814652192835, "f1": 0.04749370805369145, "f1_stderr": 0.0011837049377399571 }, "harness|gsm8k|5": { "acc": 0.04094010614101592, "acc_stderr": 0.00545807679629435 }, "harness|winogrande|5": { "acc": 0.6400947119179163, "acc_stderr": 0.013489609590266786 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
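The card above shows only a single `load_dataset` call. As a minimal sketch (assuming the `results` config and the `latest` split declared in the YAML above are still exposed by the repo, and that a recent `datasets` release is installed), discovering the available configs and pulling the aggregated metrics could look like this:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_togethercomputer__Pythia-Chat-Base-7B"

# One config per evaluated task (e.g. "harness_winogrande_5"), plus "results".
configs = get_dataset_config_names(REPO)
print(configs)

# "latest" is declared in the YAML above and points at the most recent run;
# older runs stay reachable through their timestamped splits.
results = load_dataset(REPO, "results", split="latest")
print(results[0])
```

Per-task details can be fetched the same way by swapping `"results"` for any of the `harness_*` config names listed above.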
[ -0.3865588903427124, -0.7554640173912048, 0.1781276911497116, 0.27624860405921936, -0.1646449714899063, 0.21411946415901184, -0.46762749552726746, -0.1522408425807953, 0.41104811429977417, 0.47027575969696045, -0.6682786345481873, -0.862969696521759, -0.6743501424789429, 0.07248895615339279, -0.15703298151493073, 1.0695322751998901, -0.1223778948187828, -0.22681565582752228, -0.03247612342238426, -0.21813790500164032, -0.4426436424255371, -0.5400803685188293, -0.4552611708641052, -0.44723930954933167, 0.3224325478076935, 0.7269128561019897, 0.4654689431190491, 0.6249699592590332, 0.6725620031356812, 0.4286941885948181, -0.1129315122961998, 0.19096359610557556, -0.522844672203064, -0.04895242676138878, 0.24626301229000092, -0.5965566635131836, -0.7363508939743042, 0.04925386980175972, 0.6567275524139404, 0.4953268766403198, -0.16993072628974915, 0.6238192915916443, 0.12823551893234253, 0.5381864309310913, -0.4620136320590973, 0.40864503383636475, -0.3564428985118866, -0.11103638261556625, -0.3418363630771637, -0.18824803829193115, -0.11327533423900604, -0.3184628486633301, -0.10795492678880692, -0.4585772752761841, 0.1139846220612526, 0.13665473461151123, 0.989523708820343, 0.060202568769454956, -0.18096797168254852, -0.22633297741413116, -0.34893494844436646, 0.844102680683136, -0.8463019132614136, 0.014553926885128021, 0.5683223009109497, 0.12741221487522125, -0.4270867705345154, -0.6575840711593628, -0.3918187618255615, -0.09196048974990845, -0.28188732266426086, 0.10412605851888657, -0.13563555479049683, -0.08140356838703156, 0.41240668296813965, 0.6842561960220337, -0.6576340198516846, -0.021791158244013786, -0.5865494608879089, -0.22752976417541504, 0.9521380662918091, 0.3006788492202759, 0.21728622913360596, -0.47785231471061707, -0.2642982304096222, -0.3258451521396637, -0.39045727252960205, 0.18964944779872894, 0.4839664101600647, 0.4763329327106476, -0.6994526386260986, 0.8242529630661011, -0.47627851366996765, 0.49075326323509216, -0.0950290858745575, -0.21471871435642242, 0.8092476725578308, -0.5529522895812988, -0.19864127039909363, -0.009436546824872494, 1.1354376077651978, 0.45771488547325134, 0.07001914829015732, 0.14111767709255219, -0.10517863184213638, 0.07987472414970398, 0.0557398796081543, -0.7744159698486328, -0.3262210190296173, 0.41890764236450195, -0.6311905980110168, -0.40929439663887024, 0.15538771450519562, -0.9888138175010681, -0.23650312423706055, -0.21896034479141235, 0.24334874749183655, -0.18414489924907684, -0.4806377589702606, -0.1859671175479889, -0.10354803502559662, 0.17039619386196136, 0.0889093205332756, -0.6519941091537476, 0.4169767200946808, 0.6035640239715576, 1.0364038944244385, -0.13133645057678223, -0.4014013409614563, -0.3003515303134918, -0.354564905166626, -0.09307759255170822, 0.46381449699401855, -0.15802675485610962, -0.42825794219970703, -0.24480333924293518, 0.26052728295326233, -0.2712550759315491, -0.5068187117576599, 0.6074694395065308, -0.23562513291835785, 0.15377241373062134, -0.21256425976753235, -0.4853804111480713, -0.10388536006212234, 0.3192676305770874, -0.6496414542198181, 1.413038969039917, 0.27526146173477173, -0.9388071298599243, 0.04279075562953949, -0.7510841488838196, -0.18274375796318054, 0.057452961802482605, -0.02270287275314331, -0.604267418384552, -0.14368386566638947, 0.15235857665538788, 0.5438528656959534, -0.37838590145111084, 0.10136529803276062, -0.3234241008758545, -0.4607245922088623, 0.2506381571292877, -0.20686256885528564, 1.0340856313705444, 0.13489718735218048, -0.47834312915802, 
0.21807506680488586, -0.9383283853530884, 0.1839224249124527, 0.2953462600708008, -0.49926888942718506, -0.14217056334018707, -0.1655711531639099, 0.14133360981941223, 0.1869506537914276, 0.5591012835502625, -0.5444632768630981, 0.3283320367336273, -0.23375429213047028, 0.3572980761528015, 0.9172974228858948, 0.026512743905186653, 0.33779144287109375, -0.4655396640300751, 0.48521801829338074, 0.0457061305642128, 0.3548218309879303, 0.1589706540107727, -0.585905134677887, -0.8347548246383667, -0.24145956337451935, 0.05831868201494217, 0.6765193939208984, -0.49168363213539124, 0.7900855541229248, -0.4061300456523895, -0.7665058970451355, -0.6550837159156799, 0.10386613756418228, 0.45260363817214966, 0.49093160033226013, 0.36363038420677185, -0.35208606719970703, -0.7192492485046387, -1.0003173351287842, 0.046644844114780426, -0.2603360116481781, 0.0738740861415863, 0.5317357182502747, 0.9660676121711731, -0.2748071253299713, 0.6715734601020813, -0.6633790135383606, -0.2335546910762787, -0.34597963094711304, 0.010689939372241497, 0.7678633332252502, 0.5539518594741821, 0.36330774426460266, -0.6108261942863464, -0.3346457779407501, -0.058498334139585495, -0.7824936509132385, -0.17850637435913086, -0.08613155782222748, -0.3341391384601593, 0.27025288343429565, -0.05556320771574974, -0.5770249366760254, 0.5643517971038818, 0.6232759952545166, -0.6208903789520264, 0.6198377013206482, -0.08835957199335098, 0.49188247323036194, -1.2345365285873413, 0.11710160225629807, -0.028186623007059097, -0.031040944159030914, -0.43612658977508545, -0.23748983442783356, 0.00362069602124393, 0.25571227073669434, -0.3955130875110626, 0.5806761384010315, -0.4988711476325989, -0.27322423458099365, -0.010614601895213127, 0.18542826175689697, -0.17061622440814972, 0.5880371928215027, -0.27721792459487915, 0.8203688859939575, 0.6395754218101501, -0.3999883234500885, 0.5591817498207092, 0.5480210781097412, -0.46534332633018494, 0.25203919410705566, -0.5324838161468506, 0.1104455515742302, 0.2715030908584595, 0.2131243497133255, -0.9190192818641663, -0.37945911288261414, 0.5204209089279175, -0.667245090007782, 0.11860992014408112, -0.39526915550231934, -0.5713511109352112, -0.47555628418922424, -0.4331703782081604, 0.21730035543441772, 0.5292644500732422, -0.47189635038375854, 0.36892780661582947, 0.4862916171550751, 0.03808681666851044, -0.6542198061943054, -0.6860749125480652, -0.1108260527253151, -0.3918711841106415, -0.8019790053367615, 0.3252985179424286, -0.14777804911136627, -0.32914412021636963, -0.041375625878572464, -0.03649644926190376, -0.03166716545820236, 0.16090315580368042, 0.4231691360473633, 0.5749617218971252, -0.08276256173849106, -0.3008822202682495, -0.3340287506580353, -0.1049344539642334, 0.16860367357730865, 0.11554544419050217, 0.7144167423248291, -0.29451876878738403, -0.24387511610984802, -0.3415001630783081, 0.1861337125301361, 0.47957050800323486, -0.1418868750333786, 0.9312637448310852, 0.5988161563873291, -0.2379993498325348, -0.022631367668509483, -0.4086696207523346, -0.061736900359392166, -0.5019097924232483, 0.3811517655849457, -0.28869131207466125, -0.803280770778656, 0.9054816365242004, 0.20912937819957733, 0.13522666692733765, 0.6302944421768188, 0.6911421418190002, 0.11080040782690048, 0.7860089540481567, 0.25483742356300354, -0.21070057153701782, 0.6048563718795776, -0.6635995507240295, 0.01430745329707861, -1.027290940284729, -0.3698429465293884, -0.47126343846321106, -0.407550573348999, -0.9001955389976501, -0.3240942060947418, 0.2164224088191986, 0.19844655692577362, 
-0.42089009284973145, 0.495924174785614, -0.7130454182624817, 0.24625231325626373, 0.6982149481773376, 0.3087068796157837, 0.07159726321697235, -0.0805644765496254, 0.020805295556783676, 0.2172214686870575, -0.49548402428627014, -0.3884797990322113, 1.4277898073196411, 0.2627958655357361, 0.6400989294052124, 0.014163338579237461, 0.9772717952728271, 0.2637650966644287, 0.2978167235851288, -0.5468211770057678, 0.6578113436698914, 0.0794685035943985, -0.5037724375724792, -0.1593029499053955, -0.6991274356842041, -0.935430645942688, 0.2184349149465561, -0.06674860417842865, -1.0905671119689941, -0.007893777452409267, 0.03298439458012581, -0.0031488300301134586, 0.30731624364852905, -0.6890036463737488, 0.930284857749939, -0.27044397592544556, -0.3987515866756439, -0.07584144175052643, -0.8246201276779175, 0.43996667861938477, 0.18942514061927795, 0.35756537318229675, -0.2686570882797241, 0.1680716872215271, 1.1693389415740967, -0.6147667169570923, 0.7328456044197083, -0.15306027233600616, 0.1924712359905243, 0.4015420079231262, -0.2625676989555359, 0.5620951056480408, 0.033293526619672775, -0.11188077181577682, 0.4479725658893585, -0.14602020382881165, -0.2809174656867981, -0.32462403178215027, 0.97109055519104, -0.9951767921447754, -0.34522074460983276, -0.513260006904602, -0.621646523475647, 0.15960504114627838, 0.22168444097042084, 0.34190601110458374, 0.35663822293281555, 0.0658186599612236, 0.2943493127822876, 0.3089619278907776, -0.21320636570453644, 0.4731273055076599, 0.2893628776073456, -0.2105264812707901, -0.7058005332946777, 0.6448115706443787, 0.14689816534519196, 0.15992949903011322, 0.10610447824001312, 0.06457538157701492, -0.5296272039413452, -0.4128621518611908, -0.3530837893486023, 0.26718854904174805, -0.626401424407959, -0.34077948331832886, -0.4006112515926361, -0.27273625135421753, -0.4246482849121094, 0.037368811666965485, -0.4603908658027649, -0.4193708896636963, -0.4758434593677521, -0.14188222587108612, 0.7162171602249146, 0.4652751684188843, -0.2951463460922241, 0.39589014649391174, -0.8152099847793579, 0.23600640892982483, -0.26038652658462524, 0.426564484834671, -0.12725695967674255, -0.5559719204902649, -0.34284716844558716, 0.22242581844329834, -0.4000382721424103, -0.9660297632217407, 0.5611187815666199, -0.02563667856156826, 0.6944424510002136, -0.027958421036601067, 0.13863466680049896, 0.7784225344657898, -0.15666379034519196, 0.9688328504562378, 0.026806408539414406, -0.7765424251556396, 0.771533191204071, -0.2885148823261261, 0.10090989619493484, 0.5213802456855774, 0.12270128726959229, -0.5486539602279663, -0.23906370997428894, -0.9960278272628784, -1.1241756677627563, 1.0896852016448975, 0.649243950843811, -0.2224942296743393, 0.12136119604110718, 0.24505385756492615, -0.25500088930130005, 0.1359376758337021, -0.673639178276062, -0.8487530946731567, -0.09545176476240158, -0.3286139667034149, -0.010419257916510105, 0.011337305419147015, -0.3895428776741028, -0.39429065585136414, 0.8791236281394958, 0.025839222595095634, 0.5184835195541382, 0.14669619500637054, -0.10688696056604385, -0.08589330315589905, 0.27690589427948, 0.37051117420196533, 0.767701268196106, -0.2952696979045868, -0.09154669940471649, 0.30383217334747314, -0.628900408744812, 0.030585404485464096, 0.32064133882522583, 0.08613598346710205, -0.1074582189321518, 0.544003427028656, 0.9500640630722046, 0.12392255663871765, -0.37180307507514954, 0.5210774540901184, -0.09273042529821396, -0.2056206464767456, -0.495842307806015, 0.13039477169513702, 0.059769682586193085, 0.47416019439697266, 
0.42160743474960327, -0.10867869853973389, -0.029016925022006035, -0.4163964092731476, 0.2928644120693207, 0.2683749496936798, 0.031234929338097572, -0.2521914541721344, 0.47100043296813965, -0.017428558319807053, -0.42435476183891296, 0.8654031753540039, -0.08944965898990631, -0.6467898488044739, 1.0512531995773315, 0.36730286478996277, 0.7719793915748596, -0.0828431099653244, 0.07011847198009491, 0.5571534037590027, 0.25496160984039307, -0.15049666166305542, 0.593629002571106, 0.07248842716217041, -0.6578993201255798, -0.24848078191280365, -0.7591084241867065, -0.28480616211891174, 0.2918812334537506, -0.9446671605110168, 0.41846349835395813, -0.10671038180589676, -0.25491124391555786, -0.15708552300930023, 0.42796361446380615, -0.7930164933204651, 0.12741051614284515, 0.021441416814923286, 0.778532862663269, -1.0059036016464233, 0.6558992266654968, 0.7723944783210754, -0.5067669153213501, -1.0268210172653198, -0.35357770323753357, 0.04977191612124443, -0.7873409986495972, 0.37357500195503235, 0.24393363296985626, 0.3886144757270813, -0.10196183621883392, -0.6196545958518982, -1.0672560930252075, 1.4903634786605835, 0.1165669783949852, -0.6165099143981934, 0.12903034687042236, 0.1169424057006836, 0.3559856712818146, -0.2431650459766388, 0.6518806219100952, 0.7928805947303772, 0.6200776696205139, 0.02329724095761776, -1.0401082038879395, 0.2804219126701355, -0.5206658244132996, -0.155656099319458, 0.3337627053260803, -0.8692082166671753, 1.009401798248291, -0.08768045157194138, -0.017756003886461258, -0.09206710010766983, 0.5083614587783813, 0.587337076663971, 0.2501335144042969, 0.44712409377098083, 0.6011236310005188, 0.6971390247344971, -0.3860415816307068, 0.9687089920043945, -0.35099881887435913, 0.9045795798301697, 1.091009497642517, 0.1145908534526825, 0.7122918367385864, 0.35582202672958374, -0.4683018922805786, 0.49845048785209656, 0.8071916699409485, -0.3394593596458435, 0.34961169958114624, 0.19360488653182983, -0.1030726432800293, -0.07399781048297882, 0.10491923987865448, -0.43790432810783386, 0.45903900265693665, 0.26752281188964844, -0.5312585234642029, -0.15384818613529205, -0.33824002742767334, 0.14082512259483337, -0.3209027647972107, -0.24780628085136414, 0.607255220413208, 0.012801261618733406, -0.5670946836471558, 0.6540024876594543, -0.09203067421913147, 0.7699525356292725, -0.623198926448822, -0.13709819316864014, -0.2189219743013382, 0.3833151161670685, -0.5407777428627014, -1.0011450052261353, 0.21340462565422058, 0.0630912259221077, -0.1374787837266922, -0.09019607305526733, 0.7035648822784424, -0.3121478855609894, -0.5324943661689758, 0.4180371165275574, 0.407503217458725, 0.3388354778289795, 0.10138457268476486, -0.8614729046821594, 0.31489041447639465, 0.26065558195114136, -0.7711968421936035, 0.3997110426425934, 0.24360255897045135, 0.024838311597704887, 0.6055013537406921, 0.7429582476615906, 0.1622755378484726, 0.13494692742824554, -0.06599191576242447, 1.166869044303894, -0.7045903205871582, -0.42799532413482666, -0.9066397547721863, 0.8712188005447388, -0.3122081756591797, -0.6586569547653198, 0.8829380869865417, 0.8969178199768066, 0.7468953728675842, 0.19249704480171204, 0.7981315851211548, -0.5700206160545349, 0.4232494831085205, -0.3039761483669281, 0.8681265711784363, -0.5923893451690674, 0.2672785520553589, -0.23428389430046082, -0.841591477394104, -0.023780247196555138, 0.7636426091194153, -0.2330964356660843, -0.011080284602940083, 0.6156247854232788, 0.9889297485351562, 0.009529897943139076, 0.15788845717906952, -0.04792901128530502, 
0.37522366642951965, 0.33385443687438965, 0.7124565839767456, 0.7193999886512756, -0.6793143153190613, 0.5300041437149048, -0.6878176927566528, -0.46440988779067993, -0.08985628932714462, -0.6696901917457581, -0.9161192178726196, -0.5438095331192017, -0.2899917662143707, -0.6357261538505554, -0.00256725843064487, 1.114729881286621, 0.47528281807899475, -0.8372722268104553, -0.4263836741447449, -0.05673526972532272, 0.1715833991765976, -0.13302205502986908, -0.33737707138061523, 0.5576863288879395, -0.06141868606209755, -0.710992157459259, 0.3175429105758667, -0.2337256819009781, -0.21251451969146729, -0.06740066409111023, -0.19316089153289795, -0.39845308661460876, -0.37945637106895447, 0.3405779302120209, 0.14343658089637756, -0.5991306900978088, -0.3883405923843384, -0.10117043554782867, -0.054471373558044434, 0.3308156132698059, 0.3675082325935364, -0.5620655417442322, 0.05396117642521858, 0.6448091864585876, 0.21791572868824005, 0.6896576285362244, 0.07501144707202911, 0.30941811203956604, -0.7519309520721436, 0.0509638674557209, 0.050287049263715744, 0.519825279712677, 0.25277218222618103, -0.48778584599494934, 0.8114393353462219, 0.33208462595939636, -0.832176148891449, -0.8775036931037903, -0.22873522341251373, -1.1522245407104492, 0.0675726979970932, 1.4295603036880493, -0.3325420022010803, -0.37853342294692993, 0.020288662984967232, -0.2620754539966583, 0.4989995062351227, -0.7075951099395752, 0.605907142162323, 0.7685025334358215, -0.3287377953529358, -0.06896407157182693, -0.5547841191291809, 0.35654106736183167, 0.029581863433122635, -1.0751570463180542, 0.10792867094278336, 0.4700528681278229, 0.4912140965461731, 0.21996858716011047, 0.7241572737693787, 0.041814010590314865, -0.07617262750864029, 0.04292067512869835, 0.22749070823192596, -0.16224627196788788, -0.16529180109500885, -0.1930592656135559, 0.03204254060983658, -0.3253803849220276, -0.5010672211647034 ]
open-llm-leaderboard/details_togethercomputer__GPT-NeoXT-Chat-Base-20B
open-llm-leaderboard
2023-10-21T19:34:31Z
200
0
[ "region:us" ]
null
2023-08-18T11:49:23Z
--- pretty_name: Evaluation run of togethercomputer/GPT-NeoXT-Chat-Base-20B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [togethercomputer/GPT-NeoXT-Chat-Base-20B](https://huggingface.co/togethercomputer/GPT-NeoXT-Chat-Base-20B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_togethercomputer__GPT-NeoXT-Chat-Base-20B\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T19:34:18.998946](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__GPT-NeoXT-Chat-Base-20B/blob/main/results_2023-10-21T19-34-18.998946.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0014681208053691276,\n\ \ \"em_stderr\": 0.0003921042190298267,\n \"f1\": 0.0505337667785235,\n\ \ \"f1_stderr\": 0.001244239988388026,\n \"acc\": 0.3699338727947376,\n\ \ \"acc_stderr\": 0.010093691461965018\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0014681208053691276,\n \"em_stderr\": 0.0003921042190298267,\n\ \ \"f1\": 0.0505337667785235,\n \"f1_stderr\": 0.001244239988388026\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06899166034874905,\n \ \ \"acc_stderr\": 0.00698099583483858\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6708760852407262,\n \"acc_stderr\": 0.013206387089091458\n\ \ }\n}\n```" repo_url: https://huggingface.co/togethercomputer/GPT-NeoXT-Chat-Base-20B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|arc:challenge|25_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T21:40:44.259947.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T19_34_18.998946 path: - '**/details_harness|drop|3_2023-10-21T19-34-18.998946.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T19-34-18.998946.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T19_34_18.998946 path: - '**/details_harness|gsm8k|5_2023-10-21T19-34-18.998946.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T19-34-18.998946.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hellaswag|10_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T21:40:44.259947.parquet' - config_name: 
harness_hendrycksTest_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:40:44.259947.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:40:44.259947.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:40:44.259947.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:40:44.259947.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T21_40_44.259947 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:40:44.259947.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:40:44.259947.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T21_40_44.259947 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T21:40:44.259947.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T21:40:44.259947.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T19_34_18.998946 path: - '**/details_harness|winogrande|5_2023-10-21T19-34-18.998946.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T19-34-18.998946.parquet' - config_name: results data_files: - split: 2023_07_19T21_40_44.259947 path: - results_2023-07-19T21:40:44.259947.parquet - split: 2023_10_21T19_34_18.998946 path: - results_2023-10-21T19-34-18.998946.parquet - split: latest path: - results_2023-10-21T19-34-18.998946.parquet --- # Dataset Card for Evaluation run of togethercomputer/GPT-NeoXT-Chat-Base-20B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/togethercomputer/GPT-NeoXT-Chat-Base-20B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [togethercomputer/GPT-NeoXT-Chat-Base-20B](https://huggingface.co/togethercomputer/GPT-NeoXT-Chat-Base-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_togethercomputer__GPT-NeoXT-Chat-Base-20B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T19:34:18.998946](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__GPT-NeoXT-Chat-Base-20B/blob/main/results_2023-10-21T19-34-18.998946.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0014681208053691276, "em_stderr": 0.0003921042190298267, "f1": 0.0505337667785235, "f1_stderr": 0.001244239988388026, "acc": 0.3699338727947376, "acc_stderr": 0.010093691461965018 }, "harness|drop|3": { "em": 0.0014681208053691276, "em_stderr": 0.0003921042190298267, "f1": 0.0505337667785235, "f1_stderr": 0.001244239988388026 }, "harness|gsm8k|5": { "acc": 0.06899166034874905, "acc_stderr": 0.00698099583483858 }, "harness|winogrande|5": { "acc": 0.6708760852407262, "acc_stderr": 0.013206387089091458 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
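As a complement to the loading snippet in this card, here is a minimal sketch of how one might explore this details repository further, assuming a recent version of the `datasets` library (which exposes `get_dataset_config_names`); the repository id, config names, and the `latest` split are the ones listed above:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_togethercomputer__GPT-NeoXT-Chat-Base-20B"

# Enumerate every configuration exposed by this details repository
# (one per evaluated task, plus the aggregated "results" config).
configs = get_dataset_config_names(REPO)
print(len(configs), "configurations, e.g.:", configs[:5])

# The aggregated per-run metrics live in the "results" config; the
# "latest" split always points at the most recent evaluation run.
results = load_dataset(REPO, "results", split="latest")
print(results.column_names)

# Per-example details for a single task/run, e.g. Winogrande (5-shot).
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")
print(winogrande[0])
```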
open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-7B-Instruct
open-llm-leaderboard
2023-10-19T05:42:49Z
200
0
[ "region:us" ]
null
2023-08-18T11:51:39Z
--- pretty_name: Evaluation run of togethercomputer/RedPajama-INCITE-7B-Instruct dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [togethercomputer/RedPajama-INCITE-7B-Instruct](https://huggingface.co/togethercomputer/RedPajama-INCITE-7B-Instruct)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-7B-Instruct\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T05:42:36.863532](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-7B-Instruct/blob/main/results_2023-10-19T05-42-36.863532.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001153523489932886,\n\ \ \"em_stderr\": 0.0003476179896857104,\n \"f1\": 0.04208578020134259,\n\ \ \"f1_stderr\": 0.00114625984545935,\n \"acc\": 0.3327435280488615,\n\ \ \"acc_stderr\": 0.008428433474529594\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.001153523489932886,\n \"em_stderr\": 0.0003476179896857104,\n\ \ \"f1\": 0.04208578020134259,\n \"f1_stderr\": 0.00114625984545935\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01592115238817286,\n \ \ \"acc_stderr\": 0.0034478192723889985\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6495659037095501,\n \"acc_stderr\": 0.013409047676670187\n\ \ }\n}\n```" repo_url: https://huggingface.co/togethercomputer/RedPajama-INCITE-7B-Instruct leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|arc:challenge|25_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T16:41:06.835084.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T05_42_36.863532 path: - '**/details_harness|drop|3_2023-10-19T05-42-36.863532.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T05-42-36.863532.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T05_42_36.863532 path: - '**/details_harness|gsm8k|5_2023-10-19T05-42-36.863532.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T05-42-36.863532.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hellaswag|10_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T16:41:06.835084.parquet' 
- config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:41:06.835084.parquet' - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:41:06.835084.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:41:06.835084.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:41:06.835084.parquet' - 
'**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:41:06.835084.parquet' - config_name: 
harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:41:06.835084.parquet' - 
config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:41:06.835084.parquet' - config_name: 
harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:41:06.835084.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T16_41_06.835084 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T16:41:06.835084.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T16:41:06.835084.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T05_42_36.863532 path: - '**/details_harness|winogrande|5_2023-10-19T05-42-36.863532.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T05-42-36.863532.parquet' - config_name: results data_files: - split: 2023_07_19T16_41_06.835084 path: - results_2023-07-19T16:41:06.835084.parquet - split: 2023_10_19T05_42_36.863532 path: - results_2023-10-19T05-42-36.863532.parquet - split: latest path: - results_2023-10-19T05-42-36.863532.parquet --- # Dataset Card for Evaluation run of togethercomputer/RedPajama-INCITE-7B-Instruct ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/togethercomputer/RedPajama-INCITE-7B-Instruct - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [togethercomputer/RedPajama-INCITE-7B-Instruct](https://huggingface.co/togethercomputer/RedPajama-INCITE-7B-Instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-7B-Instruct", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T05:42:36.863532](https://huggingface.co/datasets/open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-7B-Instruct/blob/main/results_2023-10-19T05-42-36.863532.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857104, "f1": 0.04208578020134259, "f1_stderr": 0.00114625984545935, "acc": 0.3327435280488615, "acc_stderr": 0.008428433474529594 }, "harness|drop|3": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857104, "f1": 0.04208578020134259, "f1_stderr": 0.00114625984545935 }, "harness|gsm8k|5": { "acc": 0.01592115238817286, "acc_stderr": 0.0034478192723889985 }, "harness|winogrande|5": { "acc": 0.6495659037095501, "acc_stderr": 0.013409047676670187 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
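The aggregated metrics shown above live in the `results` configuration declared in the YAML block; as a minimal sketch (assuming the standard `datasets` loading API and that the `results` parquet file exposes the stored run summaries as ordinary rows), they could be pulled programmatically like this:

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; its "latest" split points at the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_togethercomputer__RedPajama-INCITE-7B-Instruct",
    "results",
    split="latest",
)

# The exact column layout is an assumption here; inspect it before relying on specific keys.
print(results.column_names)
print(results[0])
```

Loading any of the per-task detail configurations (for example `harness_winogrande_5`, as in the snippet further up) works the same way, only with a different configuration name.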
[ -0.4800240099430084, -0.7066265344619751, 0.21737036108970642, 0.31413769721984863, -0.15724587440490723, 0.12921135127544403, -0.4496569037437439, -0.2511555552482605, 0.49485841393470764, 0.5740979313850403, -0.7705379724502563, -0.9123013019561768, -0.7621015906333923, 0.18713143467903137, -0.18526805937290192, 1.1084299087524414, -0.2588985562324524, -0.24523800611495972, -0.1715325266122818, -0.32170814275741577, -0.4614201486110687, -0.43561747670173645, -0.4265209436416626, -0.5148255228996277, 0.27764660120010376, 0.6308541893959045, 0.4040048122406006, 0.5971960425376892, 0.721281111240387, 0.41503897309303284, -0.08172253519296646, 0.2147994488477707, -0.4092405140399933, -0.02106432616710663, 0.26605021953582764, -0.6114668846130371, -0.70205157995224, 0.0890820100903511, 0.6470398902893066, 0.49494850635528564, -0.1687600165605545, 0.6718111634254456, 0.10922791063785553, 0.6416417360305786, -0.4992762506008148, 0.3926979899406433, -0.3264762759208679, -0.03847106918692589, -0.35557952523231506, -0.15781991183757782, -0.024679720401763916, -0.37706735730171204, -0.22910910844802856, -0.5427829027175903, 0.1369181126356125, 0.03484203293919563, 1.0049556493759155, 0.23111525177955627, -0.10476855933666229, -0.22013477981090546, -0.2963646352291107, 0.8341525793075562, -0.8192257881164551, -0.05018249899148941, 0.6544721722602844, 0.14341887831687927, -0.3510744273662567, -0.648268461227417, -0.3818701505661011, -0.07610461115837097, -0.3340582251548767, 0.1808636486530304, 0.015814997255802155, -0.17375659942626953, 0.4188443422317505, 0.6072244644165039, -0.6333954930305481, -0.05243797227740288, -0.5395445823669434, -0.06164322420954704, 1.0156110525131226, 0.3606174886226654, 0.18080787360668182, -0.4266701638698578, -0.3701939880847931, -0.3590497672557831, -0.4317278563976288, 0.17283184826374054, 0.3967908024787903, 0.5202228426933289, -0.7019753456115723, 0.8059186339378357, -0.3994087278842926, 0.5234553217887878, -0.104198157787323, -0.2583140730857849, 0.8294059038162231, -0.6218777298927307, -0.2506900131702423, -0.026574628427624702, 1.0761476755142212, 0.4234057366847992, 0.08056528121232986, 0.21863025426864624, -0.18198919296264648, 0.08736377954483032, 0.15168273448944092, -0.7247929573059082, -0.21197479963302612, 0.4024936258792877, -0.5745611786842346, -0.3049744963645935, 0.2039439082145691, -0.9818353056907654, -0.21552366018295288, -0.319545716047287, 0.2690318524837494, -0.15142704546451569, -0.3794945776462555, -0.12731468677520752, -0.1339619755744934, 0.1783236861228943, 0.18535003066062927, -0.6254469156265259, 0.4760907292366028, 0.6368974447250366, 0.9425208568572998, -0.17000675201416016, -0.40415358543395996, -0.28221961855888367, -0.28057345747947693, -0.08151500672101974, 0.4939790964126587, -0.23040753602981567, -0.42184221744537354, -0.22360895574092865, 0.31002292037010193, -0.36389949917793274, -0.6178843379020691, 0.5864664912223816, -0.21267163753509521, 0.22241315245628357, -0.20438680052757263, -0.43981510400772095, -0.1799962967634201, 0.44328737258911133, -0.6393190622329712, 1.4384976625442505, 0.2807522118091583, -0.9175530672073364, 0.11404537409543991, -0.8960059881210327, -0.253801554441452, 0.05894947424530983, -0.07598656415939331, -0.6725134253501892, -0.15422476828098297, 0.16440856456756592, 0.5729536414146423, -0.36523860692977905, 0.08983033895492554, -0.30747923254966736, -0.4461548924446106, 0.17593736946582794, -0.04143619164824486, 1.101562261581421, 0.16334767639636993, -0.45864927768707275, 0.17110148072242737, 
-0.9943246245384216, 0.12970560789108276, 0.40808138251304626, -0.4879421889781952, -0.24214746057987213, -0.28207385540008545, 0.06639225780963898, 0.1534915566444397, 0.5974189639091492, -0.5688799023628235, 0.39163491129875183, -0.1685974895954132, 0.29524630308151245, 0.8961731791496277, 0.045874807983636856, 0.23685410618782043, -0.45783352851867676, 0.6182295083999634, -0.04089553281664848, 0.35571131110191345, 0.10692571103572845, -0.5993211269378662, -0.8146600127220154, -0.23283499479293823, 0.08056364208459854, 0.6350477933883667, -0.3868664801120758, 0.6854464411735535, -0.36041995882987976, -0.6684343814849854, -0.7315335273742676, 0.10923649370670319, 0.45170292258262634, 0.4731273949146271, 0.42469221353530884, -0.3099289536476135, -0.7353653907775879, -1.0257964134216309, 0.05166022107005119, -0.16633829474449158, 0.09755115956068039, 0.5055071711540222, 1.011743187904358, -0.3820624351501465, 0.6540489792823792, -0.6997112035751343, -0.3278936445713043, -0.2636486887931824, 0.008781962096691132, 0.8202275633811951, 0.5253511667251587, 0.3857426941394806, -0.6387519836425781, -0.3748159110546112, -0.04252494126558304, -0.8147648572921753, -0.1850021481513977, -0.13125891983509064, -0.2284197360277176, 0.2652702331542969, -0.051400769501924515, -0.5118082165718079, 0.4896271526813507, 0.6164515018463135, -0.5996871590614319, 0.6059260368347168, -0.09809738397598267, 0.5179822444915771, -1.1720458269119263, 0.17778612673282623, 0.018387537449598312, -0.06112627685070038, -0.3647584319114685, -0.06042524799704552, 0.009754796512424946, 0.27388569712638855, -0.40828678011894226, 0.6204810738563538, -0.44703391194343567, -0.29604437947273254, 0.012595774605870247, 0.13971096277236938, -0.0782693549990654, 0.5727765560150146, -0.32784104347229004, 0.8342990875244141, 0.47680240869522095, -0.4143334925174713, 0.3946913778781891, 0.5104756355285645, -0.5285654067993164, 0.24229907989501953, -0.4694979786872864, 0.00196650018915534, 0.21369053423404694, 0.13950307667255402, -0.8780285716056824, -0.4017053246498108, 0.46230247616767883, -0.5955270528793335, 0.2529993951320648, -0.3264177143573761, -0.6187857985496521, -0.4865873157978058, -0.48665446043014526, 0.1911061555147171, 0.5189211368560791, -0.5438082814216614, 0.30025628209114075, 0.5872278809547424, -0.0056265066377818584, -0.6605035662651062, -0.7260862588882446, -0.15033744275569916, -0.40125665068626404, -0.761564314365387, 0.33680295944213867, -0.21062219142913818, -0.3405638039112091, -0.041053272783756256, 0.031009826809167862, -0.0875185951590538, 0.27203765511512756, 0.45763319730758667, 0.5392967462539673, 0.021987497806549072, -0.3141205906867981, -0.20913492143154144, -0.12610411643981934, 0.19758303463459015, 0.22638818621635437, 0.5961631536483765, -0.26096785068511963, -0.25539475679397583, -0.2571418881416321, 0.18984998762607574, 0.4720015227794647, -0.07681164890527725, 0.8505313992500305, 0.6092914342880249, -0.24084968864917755, -0.05538360774517059, -0.3558373749256134, -0.030063319951295853, -0.48002809286117554, 0.25997909903526306, -0.30955737829208374, -0.8310636878013611, 0.8297610282897949, 0.12492050975561142, 0.1546298861503601, 0.6814242601394653, 0.6805050373077393, 0.06084718182682991, 0.7486192584037781, 0.2932180166244507, -0.06584981083869934, 0.5027565360069275, -0.7279683351516724, -0.14871133863925934, -1.086219072341919, -0.4555591940879822, -0.5403302311897278, -0.40358176827430725, -0.8437367677688599, -0.3113290071487427, 0.28434357047080994, 0.1719054877758026, -0.4350963532924652, 
0.5717158913612366, -0.7814301252365112, 0.26346150040626526, 0.6862673163414001, 0.19090738892555237, 0.10231605917215347, -0.08243738859891891, -0.07304347306489944, 0.1827889233827591, -0.47162920236587524, -0.3158593773841858, 1.3164176940917969, 0.2560994029045105, 0.6997309327125549, -0.03772056847810745, 1.0348992347717285, 0.30960673093795776, 0.3450610637664795, -0.4673616588115692, 0.6424942016601562, -0.04464218392968178, -0.5060103535652161, -0.10592325031757355, -0.5910235047340393, -0.9882469773292542, 0.25266242027282715, -0.10403574258089066, -0.9844449758529663, 0.02015049010515213, 0.0045837657526135445, 0.012763164937496185, 0.25690022110939026, -0.6187558174133301, 0.8702660799026489, -0.21835485100746155, -0.36816027760505676, -0.04029643163084984, -0.8467605710029602, 0.49233922362327576, 0.06613044440746307, 0.3765642046928406, -0.28346899151802063, 0.09003209322690964, 1.1882929801940918, -0.6028987169265747, 0.7757551670074463, -0.1864972561597824, 0.12875276803970337, 0.3860209584236145, -0.3370357155799866, 0.6148126721382141, -0.01793217472732067, -0.1778104156255722, 0.5556651949882507, -0.1712493896484375, -0.21375755965709686, -0.3171652555465698, 0.9770752191543579, -0.9465422034263611, -0.3907989263534546, -0.4746756851673126, -0.5848048329353333, 0.21239373087882996, 0.23186273872852325, 0.3285846412181854, 0.21220119297504425, 0.11365848779678345, 0.24980179965496063, 0.2831353545188904, -0.1980152577161789, 0.5243200063705444, 0.35550400614738464, -0.20699791610240936, -0.7468688488006592, 0.6048774123191833, 0.2018670290708542, 0.06985517591238022, 0.20755323767662048, 0.07985057681798935, -0.5355031490325928, -0.4350273311138153, -0.38679924607276917, 0.3251033127307892, -0.5889822244644165, -0.36676058173179626, -0.49535396695137024, -0.2329789698123932, -0.4120270907878876, -0.04590873047709465, -0.49937450885772705, -0.516010582447052, -0.42469504475593567, -0.126515194773674, 0.6828582286834717, 0.5594793558120728, -0.33250692486763, 0.2464861124753952, -0.8136749863624573, 0.23401403427124023, -0.22205710411071777, 0.4021928310394287, -0.16339832544326782, -0.5355881452560425, -0.4520297646522522, 0.10467509925365448, -0.3726342022418976, -0.7865597009658813, 0.6048665642738342, -0.08628617972135544, 0.6562455892562866, -0.03926108777523041, 0.1818159967660904, 0.8021479249000549, -0.17089010775089264, 0.9989141821861267, 0.05377791076898575, -0.7057473659515381, 0.8253522515296936, -0.22765277326107025, 0.19254669547080994, 0.6099337935447693, 0.18664497137069702, -0.4420003592967987, -0.29516279697418213, -0.9615936875343323, -1.114295482635498, 1.059809923171997, 0.5764914155006409, -0.3189152479171753, 0.06876584887504578, 0.2775658965110779, -0.053501393646001816, 0.1643531620502472, -0.7299667596817017, -0.8622598052024841, -0.059566449373960495, -0.26929473876953125, -0.015801774337887764, -0.0015991325490176678, -0.5274028778076172, -0.35658660531044006, 0.8952791094779968, 0.010377860628068447, 0.5035936832427979, 0.15961600840091705, -0.04555946961045265, -0.16234421730041504, 0.29475194215774536, 0.5022000670433044, 0.7559826970100403, -0.3833266794681549, -0.08994462341070175, 0.3087305426597595, -0.5762107372283936, 0.08013393729925156, 0.35923832654953003, -0.026114974170923233, -0.14926837384700775, 0.6177358031272888, 0.9064556360244751, 0.0665498599410057, -0.4018273651599884, 0.5269092917442322, -0.10467763990163803, -0.22343480587005615, -0.5645214319229126, 0.19635330140590668, -0.046188946813344955, 0.43880921602249146, 
0.34396427869796753, -0.05110694468021393, -0.0384824201464653, -0.30849960446357727, 0.29454413056373596, 0.34129559993743896, -0.034573569893836975, -0.2819337248802185, 0.5774325132369995, -0.03774089366197586, -0.3653189241886139, 0.7566062211990356, -0.03787849843502045, -0.5596030950546265, 1.1112879514694214, 0.4232494831085205, 0.859990119934082, -0.04546254873275757, 0.0569625198841095, 0.6173236966133118, 0.304727166891098, -0.1390514373779297, 0.5684055089950562, 0.02679443359375, -0.6253867745399475, -0.3327867090702057, -0.8414112329483032, -0.2763776481151581, 0.30310189723968506, -1.0024545192718506, 0.35521113872528076, -0.12448686361312866, -0.21498511731624603, -0.20557688176631927, 0.46266424655914307, -0.8372563719749451, 0.15583062171936035, -0.03191157802939415, 0.8476312160491943, -1.0129579305648804, 0.6869469285011292, 0.8989763259887695, -0.5024871826171875, -0.8798090815544128, -0.3627490699291229, 0.038790591061115265, -0.8901092410087585, 0.5046949982643127, 0.3086838126182556, 0.3768504858016968, -0.20438173413276672, -0.5984392166137695, -1.0519108772277832, 1.4995989799499512, 0.15717482566833496, -0.5908578634262085, 0.27361512184143066, 0.08619292825460434, 0.33763059973716736, -0.3274652063846588, 0.5623886585235596, 0.7395645976066589, 0.7566748261451721, -0.052031010389328, -0.9273329973220825, 0.3248782455921173, -0.5005518794059753, -0.16410696506500244, 0.4431193470954895, -0.8763921856880188, 1.0159614086151123, -0.18332599103450775, -0.10818522423505783, -0.11610794812440872, 0.40383896231651306, 0.6164699792861938, 0.3339574933052063, 0.46275341510772705, 0.7799056172370911, 0.6724895238876343, -0.30549970269203186, 1.1612536907196045, -0.30212071537971497, 0.888859212398529, 1.1755291223526, 0.08822715282440186, 0.6878504157066345, 0.3227356970310211, -0.5668237209320068, 0.5244520902633667, 0.8041641116142273, -0.35264167189598083, 0.40917864441871643, 0.11919558048248291, -0.1009078249335289, -0.07071816176176071, 0.02388361282646656, -0.5107702612876892, 0.3831547498703003, 0.23576408624649048, -0.56756991147995, -0.15636475384235382, -0.30260998010635376, 0.12762877345085144, -0.2083127498626709, -0.22710716724395752, 0.6743139028549194, 0.02492655999958515, -0.6100831627845764, 0.7342872023582458, -0.12776033580303192, 0.7418118119239807, -0.6543336510658264, -0.1712798923254013, -0.25478455424308777, 0.32274672389030457, -0.5291264653205872, -1.0490474700927734, 0.2860119044780731, 0.06165953353047371, -0.2431747168302536, -0.1492205709218979, 0.6427841782569885, -0.305208295583725, -0.5451568365097046, 0.5160321593284607, 0.33060890436172485, 0.3414938151836395, 0.15741582214832306, -0.9773895144462585, 0.3409029543399811, 0.3011029362678528, -0.7147290706634521, 0.4780633747577667, 0.16882561147212982, 0.09207937866449356, 0.6119211316108704, 0.6913567185401917, 0.09745049476623535, 0.09864338487386703, -0.02916906401515007, 1.113023281097412, -0.7886172533035278, -0.3731163442134857, -0.8337079882621765, 0.8797144889831543, -0.24590320885181427, -0.5695024728775024, 0.8856266736984253, 0.8559481501579285, 0.8471755385398865, 0.11835943907499313, 0.7770645022392273, -0.46007469296455383, 0.46438705921173096, -0.3844273090362549, 0.8367647528648376, -0.7551017999649048, 0.31143608689308167, -0.1918787658214569, -0.8868300914764404, -0.15646865963935852, 0.7143217325210571, -0.12899067997932434, -0.0415976382791996, 0.5735828280448914, 0.9539526700973511, 0.05444010719656944, 0.12929055094718933, -0.03556564077734947, 0.402792364358902, 
0.2892586886882782, 0.6159905791282654, 0.6380825042724609, -0.6769658327102661, 0.40541452169418335, -0.6944540143013, -0.4565337300300598, -0.10209270566701889, -0.6959913372993469, -0.8137636184692383, -0.5307818651199341, -0.3445949852466583, -0.5554625988006592, -0.0282003041356802, 1.104372262954712, 0.49626749753952026, -0.82855623960495, -0.40625619888305664, -0.0670204684138298, 0.21996279060840607, -0.18358635902404785, -0.3484335243701935, 0.5634672045707703, -0.1208907812833786, -0.7894287109375, 0.4189921021461487, -0.10997650027275085, -0.1947731226682663, -0.06660722941160202, -0.22194018959999084, -0.4346635639667511, -0.35338711738586426, 0.4083119034767151, 0.1972796618938446, -0.6189650893211365, -0.3087995648384094, -0.25896474719047546, 0.014869014732539654, 0.33779752254486084, 0.35358768701553345, -0.6710623502731323, 0.15551932156085968, 0.6002399325370789, 0.318484365940094, 0.7492691278457642, 0.05447747930884361, 0.22635428607463837, -0.7830178737640381, -0.020523039624094963, -0.010140816681087017, 0.5494307279586792, 0.23807293176651, -0.5212018489837646, 1.04313325881958, 0.30052685737609863, -0.8302568197250366, -0.8389957547187805, -0.20873422920703888, -1.20382821559906, -0.056280169636011124, 1.3246815204620361, -0.3391534984111786, -0.3504531681537628, 0.07851482927799225, -0.09210376441478729, 0.36619386076927185, -0.6934506297111511, 0.6193932294845581, 0.6572498083114624, -0.4440167248249054, -0.04672568663954735, -0.49338439106941223, 0.28338658809661865, 0.024187121540308, -1.1264616250991821, 0.033201467245817184, 0.35883164405822754, 0.41159480810165405, 0.28106889128685, 0.7056026458740234, -0.049047354608774185, -0.14846746623516083, 0.015339374542236328, 0.24409906566143036, -0.24009199440479279, -0.16830089688301086, -0.22847552597522736, 0.1833413541316986, -0.3794695734977722, -0.4925878643989563 ]
open-llm-leaderboard/details_h2oai__h2ogpt-oasst1-512-20b
open-llm-leaderboard
2023-10-19T03:05:50Z
200
0
[ "region:us" ]
null
2023-08-18T11:53:49Z
--- pretty_name: Evaluation run of h2oai/h2ogpt-oasst1-512-20b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [h2oai/h2ogpt-oasst1-512-20b](https://huggingface.co/h2oai/h2ogpt-oasst1-512-20b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_h2oai__h2ogpt-oasst1-512-20b\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T03:05:37.709537](https://huggingface.co/datasets/open-llm-leaderboard/details_h2oai__h2ogpt-oasst1-512-20b/blob/main/results_2023-10-19T03-05-37.709537.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n\ \ \"em_stderr\": 0.00037786091964609505,\n \"f1\": 0.05176384228187931,\n\ \ \"f1_stderr\": 0.0012682806127954247,\n \"acc\": 0.3560947909043528,\n\ \ \"acc_stderr\": 0.008971438537963025\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.001363255033557047,\n \"em_stderr\": 0.00037786091964609505,\n\ \ \"f1\": 0.05176384228187931,\n \"f1_stderr\": 0.0012682806127954247\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.03184230477634572,\n \ \ \"acc_stderr\": 0.004836348558260912\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6803472770323599,\n \"acc_stderr\": 0.013106528517665137\n\ \ }\n}\n```" repo_url: https://huggingface.co/h2oai/h2ogpt-oasst1-512-20b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|arc:challenge|25_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T21:43:07.012781.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T03_05_37.709537 path: - '**/details_harness|drop|3_2023-10-19T03-05-37.709537.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T03-05-37.709537.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T03_05_37.709537 path: - '**/details_harness|gsm8k|5_2023-10-19T03-05-37.709537.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T03-05-37.709537.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hellaswag|10_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:43:07.012781.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:43:07.012781.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:43:07.012781.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:43:07.012781.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T21_43_07.012781 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:43:07.012781.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:43:07.012781.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T21_43_07.012781 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T21:43:07.012781.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T21:43:07.012781.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T03_05_37.709537 path: - '**/details_harness|winogrande|5_2023-10-19T03-05-37.709537.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T03-05-37.709537.parquet' - config_name: results data_files: - split: 2023_07_19T21_43_07.012781 path: - results_2023-07-19T21:43:07.012781.parquet - split: 2023_10_19T03_05_37.709537 path: - results_2023-10-19T03-05-37.709537.parquet - split: latest path: - results_2023-10-19T03-05-37.709537.parquet --- # Dataset Card for Evaluation run of h2oai/h2ogpt-oasst1-512-20b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/h2oai/h2ogpt-oasst1-512-20b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [h2oai/h2ogpt-oasst1-512-20b](https://huggingface.co/h2oai/h2ogpt-oasst1-512-20b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_h2oai__h2ogpt-oasst1-512-20b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T03:05:37.709537](https://huggingface.co/datasets/open-llm-leaderboard/details_h2oai__h2ogpt-oasst1-512-20b/blob/main/results_2023-10-19T03-05-37.709537.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001363255033557047, "em_stderr": 0.00037786091964609505, "f1": 0.05176384228187931, "f1_stderr": 0.0012682806127954247, "acc": 0.3560947909043528, "acc_stderr": 0.008971438537963025 }, "harness|drop|3": { "em": 0.001363255033557047, "em_stderr": 0.00037786091964609505, "f1": 0.05176384228187931, "f1_stderr": 0.0012682806127954247 }, "harness|gsm8k|5": { "acc": 0.03184230477634572, "acc_stderr": 0.004836348558260912 }, "harness|winogrande|5": { "acc": 0.6803472770323599, "acc_stderr": 0.013106528517665137 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.36523857712745667, -0.6601061820983887, 0.1964453011751175, 0.18791189789772034, -0.11569877713918686, 0.14916673302650452, -0.3252841830253601, -0.2821155786514282, 0.31662991642951965, 0.44893303513526917, -0.6444377899169922, -0.8247718811035156, -0.66447913646698, 0.06434796005487442, -0.0055377064272761345, 1.0420392751693726, -0.3272314667701721, -0.20644967257976532, 0.044146157801151276, -0.2838106155395508, -0.32410943508148193, -0.3833450675010681, -0.6198878288269043, -0.46166545152664185, 0.3429044783115387, 0.6347568035125732, 0.3344863951206207, 0.7815443873405457, 0.6953667402267456, 0.3117521107196808, -0.08806754648685455, 0.12600089609622955, -0.4923741817474365, -0.1224784255027771, 0.21015247702598572, -0.5394060611724854, -0.8461616039276123, 0.1880037933588028, 0.6921408772468567, 0.46907031536102295, -0.19848951697349548, 0.6651561260223389, 0.17592114210128784, 0.5793851613998413, -0.550470232963562, 0.3816826045513153, -0.2970429062843323, -0.022748155519366264, -0.2888782024383545, -0.12178335338830948, -0.03630305826663971, -0.23325370252132416, -0.093216672539711, -0.49986398220062256, 0.08117345720529556, 0.276496946811676, 1.1478323936462402, 0.2078133225440979, -0.18968240916728973, -0.20067566633224487, -0.29885855317115784, 0.867316722869873, -0.9096668362617493, 0.00585877476260066, 0.7022932171821594, 0.1507127285003662, -0.23836569488048553, -0.49621322751045227, -0.3443470001220703, -0.04482480138540268, -0.28659602999687195, 0.2185487002134323, 0.03285163640975952, -0.16841420531272888, 0.38591983914375305, 0.6270946860313416, -0.7128035426139832, 0.10838272422552109, -0.5851732492446899, -0.21035410463809967, 0.997015655040741, 0.3165532648563385, 0.03302403911948204, -0.48615404963493347, -0.476348340511322, -0.28900855779647827, -0.41848865151405334, 0.23633764684200287, 0.4950707256793976, 0.3976783752441406, -0.6783317923545837, 0.8329207897186279, -0.40179598331451416, 0.47285163402557373, -0.12659665942192078, -0.28811168670654297, 0.8354446291923523, -0.5724497437477112, -0.195632666349411, 0.057195164263248444, 1.072969913482666, 0.355791300535202, -0.0021339792292565107, 0.28207632899284363, -0.2222049981355667, -0.16172651946544647, 0.043649133294820786, -0.8614777326583862, -0.1330777406692505, 0.45911192893981934, -0.5938802361488342, -0.3735527992248535, 0.3319430351257324, -0.972100555896759, -0.14308053255081177, -0.20004157721996307, 0.2176641821861267, -0.203645721077919, -0.41349083185195923, -0.09304764121770859, -0.2712717652320862, 0.30447882413864136, 0.12198329716920853, -0.5861926078796387, 0.3841988444328308, 0.6547227501869202, 0.9914693832397461, -0.18956544995307922, -0.40435507893562317, -0.3653620481491089, -0.19708167016506195, -0.12264399975538254, 0.3839077651500702, -0.16013579070568085, -0.3484525680541992, -0.15189670026302338, 0.28638994693756104, -0.3019256591796875, -0.6212847828865051, 0.644980251789093, -0.22190850973129272, 0.2367388755083084, -0.24440322816371918, -0.447752982378006, -0.030022205784916878, 0.37643224000930786, -0.5818033814430237, 1.4801609516143799, 0.42868751287460327, -0.7904154658317566, 0.07412959635257721, -0.8969772458076477, -0.23284637928009033, -0.0007083628443069756, 0.03199141100049019, -0.6333022117614746, -0.20659057796001434, 0.0351964570581913, 0.4066769480705261, -0.3319469392299652, -0.03215809166431427, -0.3059559762477875, -0.3100048303604126, 0.06665194779634476, -0.04220372438430786, 1.067772626876831, 0.25508788228034973, -0.4963502287864685, 0.05656253546476364, 
-1.0132991075515747, 0.13575245440006256, 0.4028562605381012, -0.6520257592201233, -0.21768632531166077, -0.24636788666248322, 0.15276220440864563, 0.1849094182252884, 0.5680146217346191, -0.6648617386817932, 0.36149299144744873, -0.16364793479442596, 0.4285317659378052, 0.917216956615448, -0.007961461320519447, 0.2578926980495453, -0.3409349024295807, 0.5045705437660217, -0.02695111744105816, 0.3195023834705353, 0.0756065770983696, -0.7094067931175232, -0.6892397403717041, -0.2466052621603012, 0.13842546939849854, 0.7549198865890503, -0.4223244786262512, 0.8053139448165894, -0.35915958881378174, -0.6476855278015137, -0.7486611604690552, 0.10159993171691895, 0.5469620823860168, 0.6247309446334839, 0.40089237689971924, -0.2491299957036972, -0.6325411796569824, -0.974118173122406, 0.043896615505218506, -0.3258095979690552, -0.018006673082709312, 0.5802386403083801, 1.0993828773498535, -0.31810975074768066, 0.6833909153938293, -0.8560695052146912, -0.3213379979133606, -0.18687590956687927, 0.11571468412876129, 0.6589774489402771, 0.5896223783493042, 0.42646872997283936, -0.6068142056465149, -0.3775343894958496, -0.06842053681612015, -0.7826096415519714, -0.20433546602725983, -0.03194407373666763, -0.3495965600013733, 0.36810794472694397, -0.007922065444290638, -0.5572831034660339, 0.5584118962287903, 0.517891526222229, -0.6925178170204163, 0.6839022040367126, -0.0676146000623703, 0.40007099509239197, -1.1760417222976685, 0.28665682673454285, 0.005102721508592367, 0.013889474794268608, -0.3934524953365326, -0.03676663711667061, -0.018597804009914398, 0.28097477555274963, -0.453171044588089, 0.7411500811576843, -0.4588102400302887, -0.1970531940460205, 0.021085260435938835, 0.05745634064078331, -0.07928454130887985, 0.5583589673042297, -0.26086336374282837, 0.7458060383796692, 0.4833261966705322, -0.3185676634311676, 0.37016454339027405, 0.5005459189414978, -0.5242749452590942, 0.21827973425388336, -0.5736734867095947, 0.029586318880319595, 0.2031235247850418, 0.13677342236042023, -1.0106630325317383, -0.4688280522823334, 0.503106951713562, -0.614902675151825, 0.23489175736904144, -0.34174710512161255, -0.533065676689148, -0.5397958755493164, -0.5170772671699524, 0.33025500178337097, 0.643189549446106, -0.4675627052783966, 0.23355120420455933, 0.43948739767074585, 0.0328148752450943, -0.6193953156471252, -0.7242901921272278, -0.1844966560602188, -0.2511375844478607, -0.6583260297775269, 0.39795294404029846, -0.09670966118574142, -0.2415592521429062, 0.07463733851909637, -0.12405216693878174, -0.10058313608169556, 0.1551058441400528, 0.3951435387134552, 0.5123791694641113, -0.1676107943058014, -0.3614594042301178, -0.2364121377468109, -0.1376238614320755, 0.13049544394016266, 0.0505203977227211, 0.6892423629760742, -0.21367685496807098, -0.2601139545440674, -0.31248942017555237, 0.14025291800498962, 0.47564250230789185, -0.1919480413198471, 0.8064553737640381, 0.7901893258094788, -0.3215430676937103, 0.04570978879928589, -0.3461526036262512, -0.035582657903432846, -0.4652884304523468, 0.2998647391796112, -0.2775367200374603, -0.793359100818634, 0.7871668338775635, 0.2136307805776596, 0.09201709181070328, 0.7437953352928162, 0.5758479833602905, -0.026909828186035156, 0.7804880738258362, 0.17041203379631042, -0.16912841796875, 0.5247277021408081, -0.7918573617935181, -0.05113711580634117, -1.157839059829712, -0.41049128770828247, -0.5136900544166565, -0.40027424693107605, -0.8230758309364319, -0.3653579354286194, 0.31188470125198364, 0.2205466777086258, -0.4588804543018341, 0.5323677062988281, 
-0.5530900955200195, 0.16208148002624512, 0.7196948528289795, 0.19016464054584503, -0.026184218004345894, 0.02344098873436451, -0.035794105380773544, 0.32174035906791687, -0.4766312539577484, -0.41931357979774475, 1.3182623386383057, 0.3902076482772827, 0.5984012484550476, 0.02043541520833969, 0.9282194375991821, 0.30667629837989807, 0.1862238049507141, -0.6043667793273926, 0.5671373009681702, 0.059133563190698624, -0.6036466360092163, -0.33614596724510193, -0.5779547095298767, -1.0399253368377686, 0.18008656799793243, -0.046363718807697296, -0.9940174221992493, 0.10497624427080154, 0.08748788386583328, -0.11671894788742065, 0.3002859950065613, -0.5740755200386047, 0.8883888125419617, -0.2141653299331665, -0.48719578981399536, -0.035584960132837296, -0.8989842534065247, 0.3285970091819763, 0.15106050670146942, 0.32204490900039673, -0.3139960467815399, -0.03580815717577934, 1.1144728660583496, -0.6792184710502625, 0.6356078386306763, -0.2600354850292206, 0.07590944319963455, 0.47567641735076904, -0.42391785979270935, 0.5788942575454712, -0.06037852168083191, -0.2855255603790283, 0.4999822676181793, -0.20191311836242676, -0.46915656328201294, -0.2570488154888153, 0.8393973112106323, -0.8867679238319397, -0.3467887341976166, -0.42894309759140015, -0.4682149589061737, 0.27767398953437805, 0.30106717348098755, 0.3467200696468353, 0.26362043619155884, 0.08341541141271591, 0.18109449744224548, 0.24031968414783478, -0.13006816804409027, 0.5621793270111084, 0.39023369550704956, -0.15062053501605988, -0.7544738054275513, 0.7448221445083618, 0.2298469841480255, 0.19763684272766113, 0.16847851872444153, 0.1256648302078247, -0.49465441703796387, -0.3971143662929535, -0.4932039976119995, 0.3488592803478241, -0.46920883655548096, -0.2064918875694275, -0.39726459980010986, -0.24577073752880096, -0.46228721737861633, 0.05324558913707733, -0.42646554112434387, -0.4916995167732239, -0.40265023708343506, -0.20386971533298492, 0.506165087223053, 0.5282886624336243, -0.333562433719635, 0.32152003049850464, -0.7510079741477966, 0.2628253400325775, -0.14986304938793182, 0.43504178524017334, -0.16219156980514526, -0.5659777522087097, -0.45560145378112793, 0.17725864052772522, -0.4094131588935852, -0.8826518654823303, 0.5334237813949585, 0.05702328681945801, 0.6482773423194885, 0.17795796692371368, 0.0890946090221405, 0.7339044809341431, -0.25221267342567444, 1.0891486406326294, -0.0220643300563097, -0.6932390928268433, 0.8052207827568054, -0.31269320845603943, 0.16029706597328186, 0.5336741805076599, 0.29202792048454285, -0.38920772075653076, -0.2044047713279724, -0.8618032336235046, -1.1889822483062744, 1.0470911264419556, 0.5838187336921692, -0.4405249357223511, 0.2010134756565094, 0.4166005551815033, -0.09819532185792923, 0.24503852427005768, -0.5422355532646179, -0.751162588596344, -0.08373504877090454, -0.2778065800666809, -0.059088923037052155, -0.06755377352237701, -0.41094738245010376, -0.46359124779701233, 0.9434633851051331, -0.059203460812568665, 0.5813567638397217, 0.2702111005783081, -0.03895943611860275, -0.12050580233335495, 0.24404360353946686, 0.47356945276260376, 0.6708957552909851, -0.5045796632766724, -0.03217092901468277, 0.17005132138729095, -0.6708171367645264, 0.1052565723657608, 0.25727882981300354, -0.06454057991504669, -0.1708277314901352, 0.6566242575645447, 1.0296868085861206, -0.05922619253396988, -0.47782525420188904, 0.4917617440223694, 0.13932479918003082, -0.3325379192829132, -0.48822611570358276, 0.1712597757577896, -0.0997210443019867, 0.2885216772556305, 0.430907279253006, 
-0.05400468409061432, 0.06813771277666092, -0.45014989376068115, 0.23387593030929565, 0.24110323190689087, -0.19205212593078613, -0.31806373596191406, 0.6070700883865356, -0.01959998346865177, -0.38695839047431946, 0.8315520882606506, -0.21736615896224976, -0.588307797908783, 1.1700259447097778, 0.3356538414955139, 0.8742061853408813, -0.1481890231370926, 0.058741942048072815, 0.6616743803024292, 0.3472912013530731, -0.12775884568691254, 0.6305176615715027, 0.12410328537225723, -0.6033605933189392, -0.17257343232631683, -0.7567244172096252, -0.19612248241901398, 0.37870875000953674, -1.1178677082061768, 0.3425120413303375, -0.11931395530700684, -0.2781352996826172, -0.09047911316156387, 0.3412846624851227, -0.8061293363571167, 0.17461776733398438, 0.06772057712078094, 0.9295375347137451, -0.9973087310791016, 0.6617296934127808, 0.8903745412826538, -0.4486951529979706, -0.7866941094398499, -0.19292663037776947, 0.10500150918960571, -0.9125789999961853, 0.41491326689720154, 0.2687259912490845, 0.42629945278167725, -0.1455574333667755, -0.6400112509727478, -1.0723000764846802, 1.5531998872756958, 0.06125887110829353, -0.5451403856277466, 0.16009482741355896, 0.2546507716178894, 0.4197566509246826, -0.32047000527381897, 0.5273245573043823, 0.8022645711898804, 0.8030912280082703, -0.050510093569755554, -0.9938952326774597, 0.358702689409256, -0.5025884509086609, -0.16831527650356293, 0.3161414563655853, -0.9425832033157349, 1.028266191482544, -0.290700227022171, -0.01957075670361519, -0.0417560376226902, 0.3038274049758911, 0.5203230977058411, 0.4222484827041626, 0.46428245306015015, 0.7500304579734802, 0.6709022521972656, -0.3722616136074066, 1.0570807456970215, -0.3298788070678711, 0.8698908090591431, 1.1202101707458496, -0.02497541718184948, 0.8852366805076599, 0.41737431287765503, -0.45758140087127686, 0.48328596353530884, 0.8498325943946838, -0.4296221435070038, 0.4615558385848999, 0.17152053117752075, -0.0057048615999519825, -0.016017260029911995, 0.0013320300495252013, -0.4589555561542511, 0.4176032841205597, 0.14449286460876465, -0.5229869484901428, -0.10411975532770157, -0.30312177538871765, 0.15831568837165833, -0.4045214354991913, -0.19415511190891266, 0.6596063375473022, -0.03146495670080185, -0.5885263085365295, 0.76570725440979, -0.05502086132764816, 0.6252244114875793, -0.5696933269500732, -0.13735629618167877, -0.1509975790977478, 0.2511589825153351, -0.5341002941131592, -1.0171115398406982, 0.22960683703422546, 0.037380922585725784, -0.23441550135612488, -0.16776299476623535, 0.6083694696426392, -0.35130763053894043, -0.47226619720458984, 0.46879470348358154, 0.3929726481437683, 0.34146687388420105, 0.07166796922683716, -0.875614583492279, 0.16593828797340393, 0.2230379730463028, -0.810957133769989, 0.41913044452667236, 0.13127562403678894, 0.08068180084228516, 0.5270462036132812, 0.6577802896499634, 0.06987116485834122, 0.09251918643712997, -0.1485857218503952, 0.9790557622909546, -0.7531752586364746, -0.35551443696022034, -0.8507614135742188, 0.9590634107589722, -0.319898396730423, -0.6947969198226929, 0.7858332991600037, 1.0803130865097046, 0.8353140950202942, 0.050889067351818085, 0.8642838597297668, -0.5162355303764343, 0.42866644263267517, -0.3904832899570465, 0.8672361373901367, -0.756935179233551, 0.23035430908203125, -0.06653960049152374, -0.782821536064148, 0.06382040679454803, 0.6148527264595032, -0.13684764504432678, -0.08988622575998306, 0.6047852039337158, 0.9295888543128967, 0.053129445761442184, 0.024114517495036125, -0.12060306966304779, 0.46550026535987854, 
0.3130960762500763, 0.5747961401939392, 0.5724303722381592, -0.76824551820755, 0.3838137090206146, -0.572863757610321, -0.5161544680595398, -0.23718339204788208, -0.6707913875579834, -0.7566243410110474, -0.5286920666694641, -0.4287174940109253, -0.5826842784881592, 0.0004841495829168707, 1.0807734727859497, 0.5735376477241516, -0.8654496669769287, -0.4675880968570709, 0.008510876446962357, 0.1972174048423767, -0.2508910000324249, -0.36531007289886475, 0.5553549528121948, -0.10649720579385757, -0.7561606168746948, 0.33618593215942383, -0.0951233059167862, -0.10193515568971634, -0.009169945493340492, -0.322744220495224, -0.38044479489326477, -0.20701441168785095, 0.4959266185760498, 0.21303245425224304, -0.6530364751815796, -0.32734209299087524, -0.12378271669149399, -0.06620390713214874, 0.3577110767364502, 0.3051171600818634, -0.45777714252471924, 0.01122767198830843, 0.6722453832626343, 0.2299424111843109, 0.6960903406143188, 0.07527604699134827, 0.17436455190181732, -0.7052745819091797, -0.03665894642472267, -0.05171314999461174, 0.5277149677276611, 0.08900006115436554, -0.4597957134246826, 1.046810269355774, 0.4147442579269409, -0.7186197638511658, -0.9326804280281067, -0.1623934507369995, -1.241309404373169, -0.04399968311190605, 1.5403320789337158, -0.3625863194465637, -0.4113965928554535, 0.08189455419778824, -0.23218375444412231, 0.34252339601516724, -0.7543635368347168, 0.5389330983161926, 0.6992917060852051, -0.39067286252975464, 0.0664089173078537, -0.640869677066803, 0.35845133662223816, 0.014163319952785969, -1.0456222295761108, -0.08072438091039658, 0.298683226108551, 0.4209425747394562, 0.2972947657108307, 0.6200053691864014, -0.00644069816917181, -0.16227766871452332, 0.026086680591106415, 0.26553454995155334, -0.2537557780742645, -0.1393795609474182, -0.20342962443828583, 0.1910986602306366, -0.3685101866722107, -0.5883637070655823 ]
open-llm-leaderboard/details_h2oai__h2ogpt-gm-oasst1-multilang-1024-20b
open-llm-leaderboard
2023-10-21T21:24:58Z
200
0
[ "region:us" ]
null
2023-08-18T11:54:06Z
--- pretty_name: Evaluation run of h2oai/h2ogpt-gm-oasst1-multilang-1024-20b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [h2oai/h2ogpt-gm-oasst1-multilang-1024-20b](https://huggingface.co/h2oai/h2ogpt-gm-oasst1-multilang-1024-20b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_h2oai__h2ogpt-gm-oasst1-multilang-1024-20b\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-21T21:24:46.417181](https://huggingface.co/datasets/open-llm-leaderboard/details_h2oai__h2ogpt-gm-oasst1-multilang-1024-20b/blob/main/results_2023-10-21T21-24-46.417181.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.003355704697986577,\n\ \ \"em_stderr\": 0.0005922452850005271,\n \"f1\": 0.056043414429530265,\n\ \ \"f1_stderr\": 0.0013596034176909157,\n \"acc\": 0.3531399801217468,\n\ \ \"acc_stderr\": 0.008551128750555435\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.003355704697986577,\n \"em_stderr\": 0.0005922452850005271,\n\ \ \"f1\": 0.056043414429530265,\n \"f1_stderr\": 0.0013596034176909157\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.021986353297952996,\n \ \ \"acc_stderr\": 0.004039162758110061\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6842936069455406,\n \"acc_stderr\": 0.01306309474300081\n\ \ }\n}\n```" repo_url: https://huggingface.co/h2oai/h2ogpt-gm-oasst1-multilang-1024-20b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|arc:challenge|25_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-19T21:26:27.370097.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_21T21_24_46.417181 path: - '**/details_harness|drop|3_2023-10-21T21-24-46.417181.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-21T21-24-46.417181.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_21T21_24_46.417181 path: - '**/details_harness|gsm8k|5_2023-10-21T21-24-46.417181.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-21T21-24-46.417181.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hellaswag|10_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-19T21:26:27.370097.parquet' - config_name: 
harness_hendrycksTest_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:26:27.370097.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:26:27.370097.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:26:27.370097.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:26:27.370097.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_19T21_26_27.370097 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:26:27.370097.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T21:26:27.370097.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_19T21_26_27.370097 path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T21:26:27.370097.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-19T21:26:27.370097.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_21T21_24_46.417181 path: - '**/details_harness|winogrande|5_2023-10-21T21-24-46.417181.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-21T21-24-46.417181.parquet' - config_name: results data_files: - split: 2023_07_19T21_26_27.370097 path: - results_2023-07-19T21:26:27.370097.parquet - split: 2023_10_21T21_24_46.417181 path: - results_2023-10-21T21-24-46.417181.parquet - split: latest path: - results_2023-10-21T21-24-46.417181.parquet --- # Dataset Card for Evaluation run of h2oai/h2ogpt-gm-oasst1-multilang-1024-20b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/h2oai/h2ogpt-gm-oasst1-multilang-1024-20b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [h2oai/h2ogpt-gm-oasst1-multilang-1024-20b](https://huggingface.co/h2oai/h2ogpt-gm-oasst1-multilang-1024-20b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_h2oai__h2ogpt-gm-oasst1-multilang-1024-20b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T21:24:46.417181](https://huggingface.co/datasets/open-llm-leaderboard/details_h2oai__h2ogpt-gm-oasst1-multilang-1024-20b/blob/main/results_2023-10-21T21-24-46.417181.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.003355704697986577, "em_stderr": 0.0005922452850005271, "f1": 0.056043414429530265, "f1_stderr": 0.0013596034176909157, "acc": 0.3531399801217468, "acc_stderr": 0.008551128750555435 }, "harness|drop|3": { "em": 0.003355704697986577, "em_stderr": 0.0005922452850005271, "f1": 0.056043414429530265, "f1_stderr": 0.0013596034176909157 }, "harness|gsm8k|5": { "acc": 0.021986353297952996, "acc_stderr": 0.004039162758110061 }, "harness|winogrande|5": { "acc": 0.6842936069455406, "acc_stderr": 0.01306309474300081 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
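As a small, hedged extension of the loading example in this card, the sketch below shows one way to enumerate the declared configurations and to read the aggregated `results` configuration through its `latest` split. It assumes only that the `datasets` library is installed; the repository id and the `results`/`latest` names are taken from the configuration list above, and no particular column layout of the results file is assumed.

```python
from datasets import get_dataset_config_names, load_dataset

repo_id = "open-llm-leaderboard/details_h2oai__h2ogpt-gm-oasst1-multilang-1024-20b"

# List the configurations declared for this evaluation run.
configs = get_dataset_config_names(repo_id)
print(f"{len(configs)} configurations, e.g. {configs[:5]}")

# The "results" configuration aggregates the run-level metrics; its "latest"
# split always points at the most recent evaluation run.
results = load_dataset(repo_id, "results", split="latest")
print(results.column_names)  # inspect whatever fields the results file exposes
print(results[0])            # aggregated metrics of the latest run
```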
[ -0.3909500241279602, -0.6687905788421631, 0.17434683442115784, 0.21271659433841705, -0.1322469413280487, 0.19871334731578827, -0.41557615995407104, -0.3037600815296173, 0.32862651348114014, 0.4249119162559509, -0.6171637177467346, -0.8120111227035522, -0.6983616352081299, 0.10879773646593094, 0.0726042091846466, 1.0772578716278076, -0.3189542293548584, -0.1522522270679474, 0.09035399556159973, -0.3311074376106262, -0.28555694222450256, -0.4623478055000305, -0.6055256724357605, -0.40911462903022766, 0.29785096645355225, 0.6717835068702698, 0.3335799276828766, 0.6825112700462341, 0.6168736219406128, 0.3324281871318817, -0.07330773770809174, 0.14759917557239532, -0.44508758187294006, -0.10219818353652954, 0.2536679208278656, -0.48342326283454895, -0.8541359901428223, 0.18042826652526855, 0.6988966464996338, 0.4203781187534332, -0.19501109421253204, 0.5889807939529419, 0.18354111909866333, 0.5438172817230225, -0.451977401971817, 0.43762892484664917, -0.3259880244731903, -0.08263830095529556, -0.30212628841400146, -0.13062900304794312, -0.05517629161477089, -0.22853511571884155, -0.12316174805164337, -0.5037664175033569, 0.04171227291226387, 0.21003112196922302, 1.1060426235198975, 0.10269709676504135, -0.2395993024110794, -0.22876541316509247, -0.22516539692878723, 0.9055389761924744, -0.931556761264801, 0.06561113893985748, 0.6942406296730042, 0.1521834284067154, -0.22944152355194092, -0.4448482394218445, -0.35446012020111084, -0.10323086380958557, -0.3340547978878021, 0.22960296273231506, 0.07797476649284363, -0.14285463094711304, 0.3848534822463989, 0.6141919493675232, -0.7139376997947693, 0.05664534121751785, -0.5797190070152283, -0.28052690625190735, 0.976189911365509, 0.23235534131526947, 0.06178586184978485, -0.5001842379570007, -0.41558435559272766, -0.2705267667770386, -0.4522707462310791, 0.2111954689025879, 0.49485594034194946, 0.46145936846733093, -0.6908851265907288, 0.796432614326477, -0.42669394612312317, 0.5015400052070618, -0.15067432820796967, -0.34090012311935425, 0.8634986281394958, -0.6315256953239441, -0.14149577915668488, 0.009220597334206104, 1.1369913816452026, 0.30687952041625977, 0.03080940619111061, 0.2345266044139862, -0.21314652264118195, -0.1675146222114563, -0.04278178513050079, -0.8380992412567139, -0.07835168391466141, 0.43395906686782837, -0.5475163459777832, -0.3180803656578064, 0.2595246732234955, -0.9866283535957336, -0.09772219508886337, -0.23118387162685394, 0.1993330717086792, -0.24721631407737732, -0.42814165353775024, -0.09570877254009247, -0.19467470049858093, 0.26225006580352783, 0.09799210727214813, -0.6388016939163208, 0.3283720016479492, 0.6284988522529602, 1.002778172492981, -0.1444941759109497, -0.4722713232040405, -0.4131811857223511, -0.2405320405960083, -0.07389021664857864, 0.3910200595855713, -0.1586647778749466, -0.30973881483078003, -0.16079199314117432, 0.32232266664505005, -0.32336920499801636, -0.5860785245895386, 0.6381968855857849, -0.18688157200813293, 0.2760102450847626, -0.2364974021911621, -0.39350947737693787, -0.025988606736063957, 0.4252092242240906, -0.5679506063461304, 1.4455125331878662, 0.40762925148010254, -0.7779162526130676, 0.07717757672071457, -0.8896283507347107, -0.3101325035095215, -0.01762186363339424, -0.017060548067092896, -0.5690020322799683, -0.20112289488315582, 0.09573812037706375, 0.4218854308128357, -0.3319791555404663, 0.053156133741140366, -0.25255095958709717, -0.28316760063171387, 0.03920590132474899, -0.10366062819957733, 1.0661171674728394, 0.26968255639076233, -0.48970043659210205, 
0.07659750431776047, -1.0173823833465576, 0.15993782877922058, 0.3607606887817383, -0.6943005323410034, -0.19133789837360382, -0.2434351146221161, 0.1661246418952942, 0.29423099756240845, 0.5322109460830688, -0.6616625785827637, 0.39443859457969666, -0.19456158578395844, 0.42758724093437195, 0.8751369714736938, -0.07616004347801208, 0.25320735573768616, -0.3071650266647339, 0.5336154699325562, -0.03142180293798447, 0.3178507685661316, 0.003242501290515065, -0.7525148391723633, -0.6905636787414551, -0.26826369762420654, 0.13636407256126404, 0.771773636341095, -0.5455870032310486, 0.7117245197296143, -0.37817952036857605, -0.5976366996765137, -0.8396496772766113, 0.11602987349033356, 0.5924419164657593, 0.5719729661941528, 0.4264066815376282, -0.25662127137184143, -0.6048579812049866, -0.9587025046348572, 0.021143201738595963, -0.3031744956970215, 0.03310592100024223, 0.5686253905296326, 1.014883279800415, -0.3166818916797638, 0.6205710172653198, -0.7498351335525513, -0.31580033898353577, -0.24876855313777924, 0.1230577826499939, 0.6719719767570496, 0.5789240002632141, 0.4404548704624176, -0.5865222215652466, -0.4369795322418213, 0.022305576130747795, -0.7835088968276978, -0.2070489525794983, -0.04709717631340027, -0.2903565466403961, 0.3967256546020508, 0.008972864598035812, -0.5218612551689148, 0.46464431285858154, 0.6273189187049866, -0.6393890380859375, 0.7020192742347717, -0.08312729746103287, 0.4576111435890198, -1.2151849269866943, 0.2605287730693817, -0.028290702030062675, 0.018581971526145935, -0.4350162148475647, -0.013815461657941341, 0.019010348245501518, 0.284564346075058, -0.4854397475719452, 0.7607681155204773, -0.4967350661754608, -0.16491557657718658, 0.023335831239819527, 0.06946995854377747, -0.07290069013834, 0.5860847234725952, -0.21568913757801056, 0.8131961822509766, 0.5480461716651917, -0.3345424234867096, 0.37295350432395935, 0.48132529854774475, -0.5633904337882996, 0.19452811777591705, -0.5481932759284973, 0.016042135655879974, 0.20543773472309113, 0.10118968039751053, -0.9866122007369995, -0.409890353679657, 0.4794665575027466, -0.6143549680709839, 0.2834341526031494, -0.38450223207473755, -0.5578702688217163, -0.4982661008834839, -0.5033314824104309, 0.3114961087703705, 0.5462158918380737, -0.4570121765136719, 0.20279976725578308, 0.45644477009773254, -0.03733600303530693, -0.6571850776672363, -0.7574653625488281, -0.12358804792165756, -0.23212005198001862, -0.6983847618103027, 0.34383246302604675, -0.1137673482298851, -0.18275679647922516, -0.00026639236602932215, -0.08888711035251617, -0.11216779798269272, 0.17966048419475555, 0.2890351414680481, 0.46779340505599976, -0.1940743327140808, -0.33430367708206177, -0.209095761179924, -0.15197056531906128, 0.04232806712388992, 0.05428963527083397, 0.7062084078788757, -0.26082924008369446, -0.25441107153892517, -0.28398916125297546, 0.20632760226726532, 0.5042319297790527, -0.226101815700531, 0.8363484740257263, 0.7928400039672852, -0.2370230108499527, 0.12266787886619568, -0.3349776864051819, -0.02179478481411934, -0.4656064510345459, 0.335462749004364, -0.3453831076622009, -0.8529875874519348, 0.7869997620582581, 0.2053045630455017, 0.07438135892152786, 0.7297249436378479, 0.5848903059959412, 0.07187306135892868, 0.8188686966896057, 0.1740935742855072, -0.20948828756809235, 0.5081024169921875, -0.7448679208755493, -0.0172981396317482, -1.1840633153915405, -0.40340691804885864, -0.5285542607307434, -0.31794822216033936, -0.8657101392745972, -0.3756534159183502, 0.30410560965538025, 0.20749600231647491, 
-0.4125635623931885, 0.5268995761871338, -0.5390358567237854, 0.19973143935203552, 0.7225049138069153, 0.10293291509151459, 0.031811825931072235, 0.03809504210948944, -0.09787026792764664, 0.2833845615386963, -0.4919056296348572, -0.45309484004974365, 1.3088679313659668, 0.36132586002349854, 0.6312763094902039, 0.06009405478835106, 0.8704966902732849, 0.2256210893392563, 0.21833215653896332, -0.6395962834358215, 0.5266039371490479, 0.01257153507322073, -0.6073260307312012, -0.29844024777412415, -0.6072976589202881, -1.0090588331222534, 0.24107033014297485, -0.015407013706862926, -1.0113054513931274, 0.0566951148211956, 0.048912905156612396, -0.0703330710530281, 0.26617005467414856, -0.6195949912071228, 0.9534046649932861, -0.30429181456565857, -0.4014248847961426, 0.009473959915339947, -0.9169215559959412, 0.36000195145606995, 0.11472518742084503, 0.3779609799385071, -0.3197912275791168, 0.029390249401330948, 1.0541880130767822, -0.588530421257019, 0.6366844773292542, -0.23920802772045135, 0.040649957954883575, 0.3970608413219452, -0.40419358015060425, 0.5697863101959229, -0.027723075821995735, -0.3211590647697449, 0.5422553420066833, -0.1781480610370636, -0.40826499462127686, -0.22620312869548798, 0.8597935438156128, -0.8691574931144714, -0.36080870032310486, -0.45633041858673096, -0.4705428183078766, 0.21847045421600342, 0.31173649430274963, 0.37593787908554077, 0.2739952802658081, 0.06616190075874329, 0.14813993871212006, 0.17878217995166779, -0.20549072325229645, 0.5177443623542786, 0.44205397367477417, -0.18583901226520538, -0.7470219731330872, 0.7597277164459229, 0.21810868382453918, 0.25293412804603577, 0.1756817251443863, 0.14317525923252106, -0.4968796372413635, -0.38118991255760193, -0.5791437029838562, 0.33267614245414734, -0.49023810029029846, -0.19999590516090393, -0.4453279972076416, -0.22901904582977295, -0.506986677646637, 0.11797890067100525, -0.4315211772918701, -0.5322445034980774, -0.3302864134311676, -0.17858996987342834, 0.5109743475914001, 0.5049930810928345, -0.2991039752960205, 0.3198562562465668, -0.8205200433731079, 0.2853907644748688, -0.14796805381774902, 0.45911189913749695, -0.16407153010368347, -0.5924009680747986, -0.48372364044189453, 0.21391570568084717, -0.4104647636413574, -0.8880971074104309, 0.5587629079818726, 0.1477295309305191, 0.6907650828361511, 0.19076435267925262, 0.046423036605119705, 0.7596754431724548, -0.3162243962287903, 1.060441493988037, -0.017466653138399124, -0.7522315979003906, 0.7889761328697205, -0.2819789946079254, 0.19448137283325195, 0.562696635723114, 0.2768717110157013, -0.5146499276161194, -0.27465271949768066, -0.7820271849632263, -1.1760772466659546, 1.1218788623809814, 0.5316324234008789, -0.36645975708961487, 0.13293208181858063, 0.3299304246902466, -0.06374602764844894, 0.2593291401863098, -0.5944485664367676, -0.7673752903938293, -0.11460152268409729, -0.2579755187034607, -0.10865909606218338, -0.09068707376718521, -0.37805265188217163, -0.4824367165565491, 0.9470708966255188, -0.09481276571750641, 0.5793306827545166, 0.19746986031532288, -0.059545911848545074, -0.06374196708202362, 0.34717538952827454, 0.524722695350647, 0.6505337953567505, -0.42489251494407654, -0.002386195817962289, 0.16496531665325165, -0.6468645334243774, 0.10773156583309174, 0.2820361852645874, -0.05245167762041092, -0.12305603176355362, 0.6336122751235962, 1.0790209770202637, -0.06499870121479034, -0.49581378698349, 0.5062236785888672, 0.10388137400150299, -0.2834412455558777, -0.40229684114456177, 0.0670415610074997, -0.08324916660785675, 
0.24623125791549683, 0.3991946876049042, -0.09483862668275833, 0.0534050427377224, -0.503252387046814, 0.2557486593723297, 0.25246745347976685, -0.16194915771484375, -0.3606925308704376, 0.5782211422920227, -0.008232371881604195, -0.3153301179409027, 0.8425860404968262, -0.30145424604415894, -0.6539823412895203, 1.0972704887390137, 0.3890220820903778, 0.8404204249382019, -0.21956010162830353, 0.10770709067583084, 0.6650401949882507, 0.40432170033454895, -0.12844860553741455, 0.6643330454826355, 0.1533305048942566, -0.6375812888145447, -0.2971649467945099, -0.7381619811058044, -0.15344561636447906, 0.35109859704971313, -1.0293034315109253, 0.32852786779403687, -0.06846791505813599, -0.24173200130462646, -0.09523884207010269, 0.30502066016197205, -0.7769931554794312, 0.1292388141155243, 0.06430681049823761, 0.8738270998001099, -0.9982990622520447, 0.7599624991416931, 0.8653436303138733, -0.45103153586387634, -0.7756018042564392, -0.2182159423828125, 0.13057273626327515, -0.9114636182785034, 0.42202985286712646, 0.2977866530418396, 0.42793169617652893, -0.1960425227880478, -0.557745635509491, -1.085208773612976, 1.4395923614501953, 0.13052454590797424, -0.5361291170120239, 0.2092646360397339, 0.2561216354370117, 0.4106972813606262, -0.32373934984207153, 0.5469169616699219, 0.7934921979904175, 0.827450692653656, -0.0536809004843235, -1.088470458984375, 0.3363356292247772, -0.5059940218925476, -0.22300170361995697, 0.3448556363582611, -0.9307934045791626, 1.0492701530456543, -0.2940148115158081, -0.041550345718860626, -0.05606834962964058, 0.24348464608192444, 0.4923263192176819, 0.3704608082771301, 0.38567882776260376, 0.7069239020347595, 0.6743376851081848, -0.3692605495452881, 1.0843417644500732, -0.3509993255138397, 0.8910123705863953, 1.1535996198654175, -0.016665780916810036, 0.8327549695968628, 0.43020719289779663, -0.4361291229724884, 0.4284418821334839, 0.7926058173179626, -0.3616445064544678, 0.47822991013526917, 0.15330596268177032, -0.004053027369081974, -0.016736840829253197, -0.002389120403677225, -0.47411665320396423, 0.4680901765823364, 0.16424447298049927, -0.4848356246948242, -0.11630938947200775, -0.25369828939437866, 0.2011892944574356, -0.42519712448120117, -0.14349788427352905, 0.6747875213623047, -0.030839387327432632, -0.6196776628494263, 0.7872259020805359, -0.0021047780755907297, 0.696025550365448, -0.6174408793449402, -0.11692918837070465, -0.20444820821285248, 0.21988706290721893, -0.48906412720680237, -1.0042270421981812, 0.2701871991157532, 0.05471251159906387, -0.1956794261932373, -0.17568881809711456, 0.5458245277404785, -0.44768640398979187, -0.5011978149414062, 0.5099515318870544, 0.4439970552921295, 0.38039058446884155, 0.0263660978525877, -0.9187958240509033, 0.1478790044784546, 0.2563391327857971, -0.7755687832832336, 0.4440893828868866, 0.18750062584877014, 0.060624007135629654, 0.5212239027023315, 0.661096453666687, 0.10526634752750397, 0.12164363265037537, -0.060498468577861786, 0.9538300633430481, -0.7265415191650391, -0.317195862531662, -0.860418438911438, 0.9221594929695129, -0.2991166114807129, -0.6627217531204224, 0.8421548008918762, 1.0458276271820068, 0.8668489456176758, 0.024664781987667084, 0.9113935232162476, -0.5419017672538757, 0.4031505882740021, -0.41870495676994324, 0.8680186867713928, -0.7910529375076294, 0.23951692879199982, -0.09670937061309814, -0.7724301218986511, -0.020481185987591743, 0.6519464254379272, -0.1305331438779831, -0.07772213965654373, 0.6324167847633362, 0.9203460216522217, 0.02738431841135025, -0.05334722250699997, 
-0.05717313289642334, 0.45349064469337463, 0.3149487376213074, 0.6249874830245972, 0.5814124345779419, -0.7733746767044067, 0.4079042077064514, -0.5534916520118713, -0.4832648038864136, -0.19797885417938232, -0.6793997883796692, -0.7306933403015137, -0.5425587296485901, -0.39914608001708984, -0.5498515367507935, -0.050437282770872116, 1.098152756690979, 0.5544928312301636, -0.9276144504547119, -0.4834613800048828, 0.09189969301223755, 0.2400841861963272, -0.2629045844078064, -0.3385394215583801, 0.5654367804527283, -0.16029347479343414, -0.7938150763511658, 0.4007892310619354, -0.047735538333654404, -0.11682131141424179, 0.013706057332456112, -0.36746925115585327, -0.42402783036231995, -0.2617006301879883, 0.5549835562705994, 0.20754344761371613, -0.682063102722168, -0.28640317916870117, -0.0955728217959404, -0.08298814296722412, 0.38610026240348816, 0.3406829535961151, -0.422777384519577, 0.11488430202007294, 0.6370089054107666, 0.21368466317653656, 0.6272160410881042, 0.027239568531513214, 0.19209696352481842, -0.7764914035797119, 0.049405183643102646, -0.06462734937667847, 0.5639530420303345, 0.14531812071800232, -0.4364589750766754, 1.0000029802322388, 0.384453684091568, -0.6898342370986938, -0.8434841632843018, -0.13982143998146057, -1.2108606100082397, -0.045485720038414, 1.5894548892974854, -0.374405175447464, -0.31841129064559937, 0.012745299376547337, -0.2590485215187073, 0.3349567949771881, -0.7619758248329163, 0.5284493565559387, 0.7524376511573792, -0.31889811158180237, 0.04570143669843674, -0.6582032442092896, 0.39001956582069397, 0.01823798380792141, -1.0556327104568481, -0.0485859215259552, 0.29328280687332153, 0.39085596799850464, 0.292323499917984, 0.6948073506355286, -0.03711555153131485, -0.171982541680336, -0.04534396156668663, 0.28900039196014404, -0.2150433510541916, -0.1662178933620453, -0.23811590671539307, 0.0991889163851738, -0.31520894169807434, -0.5530028939247131 ]
open-llm-leaderboard/details_heegyu__WizardVicuna-Uncensored-3B-0719
open-llm-leaderboard
2023-10-19T03:10:12Z
200
0
[ "region:us" ]
null
2023-08-18T12:02:47Z
--- pretty_name: Evaluation run of heegyu/WizardVicuna-Uncensored-3B-0719 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [heegyu/WizardVicuna-Uncensored-3B-0719](https://huggingface.co/heegyu/WizardVicuna-Uncensored-3B-0719)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_heegyu__WizardVicuna-Uncensored-3B-0719\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-19T03:10:00.849734](https://huggingface.co/datasets/open-llm-leaderboard/details_heegyu__WizardVicuna-Uncensored-3B-0719/blob/main/results_2023-10-19T03-10-00.849734.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0032508389261744967,\n\ \ \"em_stderr\": 0.0005829486708558908,\n \"f1\": 0.05307046979865784,\n\ \ \"f1_stderr\": 0.0013744215109358906,\n \"acc\": 0.32454958283792285,\n\ \ \"acc_stderr\": 0.008214760837520624\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0032508389261744967,\n \"em_stderr\": 0.0005829486708558908,\n\ \ \"f1\": 0.05307046979865784,\n \"f1_stderr\": 0.0013744215109358906\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.011372251705837756,\n \ \ \"acc_stderr\": 0.002920666198788741\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.6377269139700079,\n \"acc_stderr\": 0.013508855476252508\n\ \ }\n}\n```" repo_url: https://huggingface.co/heegyu/WizardVicuna-Uncensored-3B-0719 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: [email protected] configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|arc:challenge|25_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T10:29:51.933578.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_19T03_10_00.849734 path: - '**/details_harness|drop|3_2023-10-19T03-10-00.849734.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-19T03-10-00.849734.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_19T03_10_00.849734 path: - '**/details_harness|gsm8k|5_2023-10-19T03-10-00.849734.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-19T03-10-00.849734.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hellaswag|10_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T10:29:51.933578.parquet' - config_name: 
harness_hendrycksTest_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:29:51.933578.parquet' - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:29:51.933578.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:29:51.933578.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:29:51.933578.parquet' - config_name: 
harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - 
'**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T10_29_51.933578 
path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:29:51.933578.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T10:29:51.933578.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T10_29_51.933578 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T10:29:51.933578.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T10:29:51.933578.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_19T03_10_00.849734 path: - '**/details_harness|winogrande|5_2023-10-19T03-10-00.849734.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-19T03-10-00.849734.parquet' - config_name: results data_files: - split: 2023_07_24T10_29_51.933578 path: - results_2023-07-24T10:29:51.933578.parquet - split: 2023_10_19T03_10_00.849734 path: - results_2023-10-19T03-10-00.849734.parquet - split: latest path: - results_2023-10-19T03-10-00.849734.parquet --- # Dataset Card for Evaluation run of heegyu/WizardVicuna-Uncensored-3B-0719 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/heegyu/WizardVicuna-Uncensored-3B-0719 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [heegyu/WizardVicuna-Uncensored-3B-0719](https://huggingface.co/heegyu/WizardVicuna-Uncensored-3B-0719) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_heegyu__WizardVicuna-Uncensored-3B-0719", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T03:10:00.849734](https://huggingface.co/datasets/open-llm-leaderboard/details_heegyu__WizardVicuna-Uncensored-3B-0719/blob/main/results_2023-10-19T03-10-00.849734.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0032508389261744967,
        "em_stderr": 0.0005829486708558908,
        "f1": 0.05307046979865784,
        "f1_stderr": 0.0013744215109358906,
        "acc": 0.32454958283792285,
        "acc_stderr": 0.008214760837520624
    },
    "harness|drop|3": {
        "em": 0.0032508389261744967,
        "em_stderr": 0.0005829486708558908,
        "f1": 0.05307046979865784,
        "f1_stderr": 0.0013744215109358906
    },
    "harness|gsm8k|5": {
        "acc": 0.011372251705837756,
        "acc_stderr": 0.002920666198788741
    },
    "harness|winogrande|5": {
        "acc": 0.6377269139700079,
        "acc_stderr": 0.013508855476252508
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
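As a companion to the loading snippet in the card above, here is a minimal sketch of how the per-task configs and the aggregated "results" config could be inspected with the `datasets` library. It is not part of the auto-generated card: the repo and config names are taken from the YAML listing above, and the printed fields are purely illustrative.

```python
from datasets import get_dataset_config_names, load_dataset

# Illustrative sketch (assumptions: the repo below is public and keeps the
# config/split layout shown in the YAML section of this card).
repo = "open-llm-leaderboard/details_heegyu__WizardVicuna-Uncensored-3B-0719"

# List all available configs, e.g. the harness_* task configs plus "results".
configs = get_dataset_config_names(repo)
print(len(configs), configs[:3])

# "latest" aliases the most recent timestamped run, per the data_files above.
results = load_dataset(repo, "results", split="latest")
print(results.column_names)  # aggregated metric columns for the latest run
```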
[ -0.4082106351852417, -0.6690877079963684, 0.174336776137352, 0.25198161602020264, -0.1392529010772705, 0.023664433509111404, -0.3553875982761383, -0.1992325782775879, 0.3066430687904358, 0.6462044715881348, -0.6026638150215149, -0.9613796472549438, -0.636749267578125, 0.2475239336490631, -0.11107093840837479, 1.1807156801223755, -0.2832692265510559, -0.1999068707227707, 0.0460604690015316, -0.2890772819519043, -0.43008095026016235, -0.3157140910625458, -0.44891440868377686, -0.5192901492118835, 0.4115438759326935, 0.6384126543998718, 0.41525277495384216, 0.6019411683082581, 0.5471158027648926, 0.38118776679039, -0.04447105526924133, 0.2677861154079437, -0.478622168302536, -0.15249624848365784, 0.2539162039756775, -0.5844153761863708, -0.8144999742507935, 0.18468444049358368, 0.6884803175926208, 0.4965227246284485, -0.14554333686828613, 0.65896075963974, 0.2168063074350357, 0.6773431301116943, -0.5256232619285583, 0.45943745970726013, -0.29983457922935486, -0.004668707028031349, -0.43525251746177673, -0.18827207386493683, -0.0013112372253090143, -0.39060699939727783, -0.1586327701807022, -0.6626443266868591, 0.14434999227523804, 0.21065615117549896, 1.0566387176513672, 0.11986555904150009, -0.2146204113960266, -0.1202060878276825, -0.35758498311042786, 0.7441749572753906, -0.8859933018684387, -0.03833677992224693, 0.7563017010688782, 0.13420641422271729, -0.35003942251205444, -0.4974685609340668, -0.44109418988227844, -0.02223767712712288, -0.1797938197851181, 0.19596852362155914, -0.03947274759411812, -0.15465103089809418, 0.40309223532676697, 0.6057124733924866, -0.6603882312774658, 0.01738230139017105, -0.5910778641700745, -0.15244024991989136, 0.9999605417251587, 0.36809042096138, 0.15157951414585114, -0.587394654750824, -0.2563278079032898, -0.27183735370635986, -0.39187943935394287, 0.1908208429813385, 0.5985385775566101, 0.42316654324531555, -0.5948019623756409, 0.8976698517799377, -0.4367995262145996, 0.54154372215271, -0.0649312287569046, -0.3160983622074127, 0.8236615657806396, -0.45712509751319885, -0.2921713590621948, -0.02802671678364277, 1.1420378684997559, 0.4616958498954773, 0.0629342645406723, 0.1398390233516693, -0.20814834535121918, -0.15464048087596893, 0.10686206817626953, -0.7042078375816345, -0.24033163487911224, 0.38583797216415405, -0.6517882943153381, -0.44177964329719543, 0.25192636251449585, -0.9348450899124146, -0.1716594696044922, -0.22708280384540558, 0.21625526249408722, -0.23936982452869415, -0.46025893092155457, -0.0803479328751564, -0.13500593602657318, 0.31508544087409973, 0.16580697894096375, -0.6770782470703125, 0.3419804573059082, 0.5680859684944153, 0.9161538481712341, -0.17133024334907532, -0.43251705169677734, -0.26647692918777466, -0.1399594396352768, -0.21105888485908508, 0.47730857133865356, -0.1408238410949707, -0.4907413125038147, -0.17501363158226013, 0.35150548815727234, -0.3067079782485962, -0.5654096603393555, 0.7090693712234497, -0.3351386487483978, 0.1967744380235672, -0.22500798106193542, -0.4807649552822113, -0.11855537444353104, 0.33455032110214233, -0.7099233269691467, 1.4561820030212402, 0.39298585057258606, -0.9832594990730286, 0.08502707630395889, -0.8212071061134338, -0.13325221836566925, 0.061559442430734634, 0.05192757770419121, -0.5820838212966919, -0.18035930395126343, 0.1077205017209053, 0.5208944082260132, -0.3494250774383545, 0.0073742615059018135, -0.25851187109947205, -0.39631322026252747, 0.16127613186836243, -0.15565083920955658, 1.1198359727859497, 0.2183191031217575, -0.41198495030403137, 0.0979006439447403, 
-1.0531339645385742, 0.10840153694152832, 0.4155370891094208, -0.5542910695075989, -0.29795244336128235, -0.34338492155075073, 0.1524895876646042, 0.06105837598443031, 0.5476609468460083, -0.5805845260620117, 0.393196702003479, -0.22409085929393768, 0.09883888065814972, 0.9360595345497131, 0.005606077145785093, 0.2911156713962555, -0.3474108576774597, 0.5787525177001953, -0.09795660525560379, 0.39078691601753235, 0.20058785378932953, -0.6806936860084534, -0.713895857334137, -0.1786763072013855, 0.1832364946603775, 0.7120606899261475, -0.4822003245353699, 0.7719950675964355, -0.37222999334335327, -0.7233944535255432, -0.7406664490699768, 0.14401523768901825, 0.47078394889831543, 0.6058202981948853, 0.3705686628818512, -0.19074031710624695, -0.6699426770210266, -0.9919739961624146, -0.017389394342899323, -0.22956803441047668, -0.03312116861343384, 0.44585734605789185, 0.953447163105011, -0.3265228867530823, 0.7283756136894226, -0.7590339183807373, -0.32064932584762573, -0.24572809040546417, 0.02531294710934162, 0.7412893772125244, 0.49625441431999207, 0.3677752912044525, -0.6714264154434204, -0.4180125892162323, 0.04711689427495003, -0.9093899726867676, -0.2225218415260315, 0.005965671502053738, -0.30837157368659973, 0.3484213054180145, -0.09336987882852554, -0.5657895803451538, 0.607635498046875, 0.516714334487915, -0.6169843673706055, 0.7144439220428467, -0.15694762766361237, 0.4805375933647156, -1.140335202217102, 0.17085039615631104, 0.036986466497182846, 0.014055042527616024, -0.4290114641189575, -0.1945701688528061, 0.05092601478099823, 0.3200395107269287, -0.40197232365608215, 0.6001012921333313, -0.47083404660224915, -0.21085600554943085, -0.07304982841014862, 0.1703082174062729, -0.0776548832654953, 0.5902934074401855, -0.3758673369884491, 0.6694490909576416, 0.5487648248672485, -0.39406484365463257, 0.527637779712677, 0.5014587044715881, -0.5296896696090698, 0.27895957231521606, -0.48294129967689514, -0.021620284765958786, 0.19296713173389435, 0.1915857046842575, -0.8283710479736328, -0.39906981587409973, 0.5195496082305908, -0.5588802695274353, 0.2750439941883087, -0.4041566848754883, -0.6225473880767822, -0.5781779289245605, -0.48784124851226807, 0.19560976326465607, 0.5373894572257996, -0.50571608543396, 0.2847868502140045, 0.4882906377315521, 0.13793359696865082, -0.6762655973434448, -0.6197786927223206, -0.16127948462963104, -0.39658892154693604, -0.6718612909317017, 0.35898447036743164, -0.12448612600564957, -0.26844367384910583, -0.0010046242969110608, -0.07839570194482803, 0.011418604291975498, 0.14313729107379913, 0.37486693263053894, 0.662385880947113, -0.03825247660279274, -0.43915918469429016, -0.24650022387504578, -0.15606991946697235, 0.1800055354833603, 0.0753895714879036, 0.49531498551368713, -0.24168653786182404, -0.2496957629919052, -0.24514806270599365, 0.17574521899223328, 0.4063025712966919, -0.09554518759250641, 0.8189588189125061, 0.7881511449813843, -0.29529231786727905, -0.010359155014157295, -0.3996094763278961, 0.1703348159790039, -0.49368739128112793, 0.2709829807281494, -0.37419044971466064, -0.7033858895301819, 0.8254821300506592, 0.20511828362941742, 0.18050344288349152, 0.7783956527709961, 0.5707056522369385, 0.031626567244529724, 0.8315104842185974, 0.24614012241363525, -0.11870718002319336, 0.45371222496032715, -0.7964257597923279, -0.045797448605298996, -1.1902525424957275, -0.5431094765663147, -0.4514416456222534, -0.24895448982715607, -0.9076718091964722, -0.3639945983886719, 0.27382686734199524, 0.30983537435531616, -0.39401495456695557, 
0.4835715889930725, -0.6807719469070435, 0.2454182505607605, 0.5591554045677185, 0.18879954516887665, 0.09065572917461395, -0.09302158653736115, -0.06915206462144852, 0.2316952347755432, -0.4687100946903229, -0.5204065442085266, 1.450097680091858, 0.25002771615982056, 0.6862398386001587, -0.01306263916194439, 0.9279876947402954, 0.3210003972053528, 0.33323270082473755, -0.49208521842956543, 0.6197215914726257, 0.07081981748342514, -0.47961169481277466, -0.2815287709236145, -0.584230899810791, -1.0571837425231934, 0.3147966265678406, -0.06599265336990356, -1.000159740447998, 0.052641741931438446, -0.02313157171010971, -0.03055589459836483, 0.41148847341537476, -0.5667685270309448, 0.8681119084358215, -0.14650775492191315, -0.40049439668655396, 0.08843665570020676, -0.7623106837272644, 0.4866819381713867, 0.0559627041220665, 0.3804038166999817, -0.37836724519729614, 0.031027628108859062, 1.2101819515228271, -0.6531058549880981, 0.7468438744544983, -0.20864450931549072, -0.05228249728679657, 0.37811893224716187, -0.3644721210002899, 0.5999984741210938, 0.053963564336299896, -0.16127555072307587, 0.4739800691604614, -0.12093590199947357, -0.36613625288009644, -0.320418119430542, 0.9556524157524109, -0.972895622253418, -0.4362567067146301, -0.462165504693985, -0.5290611982345581, 0.1393955498933792, 0.27775973081588745, 0.4317359924316406, 0.274663507938385, -0.022700393572449684, 0.049647070467472076, 0.3562258183956146, -0.12549354135990143, 0.5244173407554626, 0.4510992169380188, -0.22663642466068268, -0.6644107103347778, 0.7088004350662231, 0.24520093202590942, -0.042220912873744965, 0.08921512961387634, 0.10187201201915741, -0.45792272686958313, -0.4545688033103943, -0.4620075821876526, 0.321922242641449, -0.6126274466514587, -0.34689095616340637, -0.5154378414154053, -0.2433890849351883, -0.4719447195529938, 0.054893530905246735, -0.4793636202812195, -0.4562393128871918, -0.5083594918251038, -0.18858298659324646, 0.6722809672355652, 0.6677437424659729, -0.32616671919822693, 0.1430792361497879, -0.7737725973129272, 0.3093297779560089, -0.1366724818944931, 0.4052343964576721, -0.07947006076574326, -0.593574583530426, -0.49423107504844666, 0.11942291259765625, -0.4495280385017395, -0.9907243251800537, 0.633191704750061, -0.1253340244293213, 0.7391700148582458, 0.14448654651641846, 0.16307853162288666, 0.7206243872642517, -0.2999809682369232, 1.014456868171692, 0.03609984740614891, -0.6477286219596863, 0.8212781548500061, -0.3671327233314514, 0.12189335376024246, 0.4584357738494873, 0.24169203639030457, -0.4577098786830902, -0.2964036464691162, -0.947101891040802, -1.1100443601608276, 1.0229958295822144, 0.5598171353340149, -0.31988468766212463, 0.11113491654396057, 0.35801663994789124, -0.04088069871068001, 0.055382490158081055, -0.6150795817375183, -0.8990305662155151, -0.2131544053554535, -0.22583331167697906, 0.021306045353412628, -0.016690492630004883, -0.4695403277873993, -0.4810448884963989, 0.9333221316337585, -0.04536344110965729, 0.45617520809173584, 0.20135246217250824, -0.05096957087516785, -0.08970541507005692, 0.2668739855289459, 0.35278406739234924, 0.7264975905418396, -0.3871487081050873, -0.13542161881923676, 0.32434096932411194, -0.6559406518936157, 0.053171221166849136, 0.31671151518821716, -0.010109536349773407, -0.10537964850664139, 0.5954810976982117, 0.9798029065132141, 0.04380926117300987, -0.3092425465583801, 0.5442556738853455, 0.01947903260588646, -0.2994978427886963, -0.554121196269989, 0.19499193131923676, -0.03913280740380287, 0.38292285799980164, 
0.47076186537742615, -0.06617800891399384, 0.0919317975640297, -0.2562173008918762, 0.14833305776119232, 0.2190176248550415, -0.030957935377955437, -0.3174917995929718, 0.601930558681488, -0.06833016872406006, -0.339262992143631, 0.73329758644104, -0.1292709857225418, -0.5349316000938416, 1.0383061170578003, 0.427036851644516, 0.8727284669876099, -0.1378568708896637, 0.11803784966468811, 0.6016849875450134, 0.252419114112854, -0.1608182042837143, 0.549036979675293, 0.093461774289608, -0.646053671836853, -0.18125300109386444, -0.7651002407073975, -0.20956696569919586, 0.3371172249317169, -1.087371587753296, 0.2871612310409546, -0.04832138121128082, -0.24518613517284393, -0.1617291122674942, 0.47965508699417114, -0.8940210938453674, 0.11017575114965439, 0.04180682823061943, 0.8692085146903992, -1.0085546970367432, 0.8040376901626587, 0.7379724383354187, -0.496296763420105, -0.9027662873268127, -0.236137256026268, 0.0575101301074028, -0.8220950961112976, 0.47059741616249084, 0.24501371383666992, 0.3587457835674286, -0.17426778376102448, -0.6026561260223389, -1.0474961996078491, 1.5532803535461426, 0.11659593880176544, -0.474872887134552, 0.10952255129814148, 0.09011446684598923, 0.3719920516014099, -0.30587804317474365, 0.5221152305603027, 0.8658620715141296, 0.7768522500991821, -0.14005833864212036, -0.981484591960907, 0.29206863045692444, -0.5451794266700745, -0.0444229356944561, 0.22423560917377472, -0.8860034346580505, 0.9056833982467651, -0.20315749943256378, 0.0010570413433015347, -0.0883590504527092, 0.5183515548706055, 0.610837459564209, 0.30126020312309265, 0.4645031690597534, 0.6363416314125061, 0.8000022768974304, -0.2877817451953888, 1.1569993495941162, -0.2205003798007965, 0.8189229369163513, 0.9889811873435974, 0.04840876907110214, 0.6795501112937927, 0.3197271525859833, -0.5403732657432556, 0.4938611686229706, 0.8984914422035217, -0.41446080803871155, 0.47418463230133057, 0.18982452154159546, -0.07443241029977798, 0.032777395099401474, 0.05364256724715233, -0.45176953077316284, 0.40856844186782837, 0.17105385661125183, -0.39490118622779846, -0.06195910647511482, -0.17604441940784454, 0.18689414858818054, -0.3138945996761322, -0.28402695059776306, 0.5837854146957397, -0.02113228105008602, -0.5483431816101074, 0.8049319386482239, -0.058578282594680786, 0.78121417760849, -0.6301227807998657, -0.11332866549491882, -0.2627701461315155, 0.1600639671087265, -0.5876446962356567, -0.9945868253707886, 0.1878042370080948, 0.1534503698348999, -0.25999391078948975, -0.10823339968919754, 0.6274330019950867, -0.32049039006233215, -0.5618902444839478, 0.45736345648765564, 0.36777669191360474, 0.37035346031188965, 0.16703245043754578, -0.9013412594795227, 0.18264199793338776, 0.27063024044036865, -0.8624603748321533, 0.40294525027275085, 0.33298254013061523, 0.03735429048538208, 0.5531011819839478, 0.6970336437225342, 0.1568392962217331, 0.1347508579492569, 0.04510851576924324, 1.0770337581634521, -0.7423942685127258, -0.34291407465934753, -0.7844164967536926, 0.7740560173988342, -0.30889835953712463, -0.6316177248954773, 0.997881293296814, 0.9632260203361511, 0.8231000900268555, 0.10571368783712387, 0.8517376184463501, -0.4371233880519867, 0.3749023675918579, -0.43113136291503906, 0.8744475841522217, -0.7063195109367371, 0.3392503261566162, -0.27256232500076294, -0.885747492313385, -0.0031487413216382265, 0.665523886680603, -0.17604941129684448, -0.036499232053756714, 0.5562271475791931, 0.9064593315124512, 0.04682833328843117, 0.14745095372200012, 0.04346075654029846, 0.35109373927116394, 
0.2572350800037384, 0.5492815971374512, 0.6004661321640015, -0.7547659873962402, 0.47973889112472534, -0.6399883031845093, -0.44600236415863037, -0.13237036764621735, -0.6690930724143982, -0.8488511443138123, -0.5518924593925476, -0.3580354154109955, -0.5974997282028198, -0.08260355144739151, 0.9825266599655151, 0.43814998865127563, -0.7950524687767029, -0.37848979234695435, 0.07563278079032898, 0.22786520421504974, -0.18057547509670258, -0.3528901934623718, 0.6767264604568481, -0.05380015820264816, -0.8381980657577515, 0.39454707503318787, -0.15976478159427643, -0.13351453840732574, -0.08709969371557236, -0.3171077370643616, -0.3217379152774811, -0.27391931414604187, 0.4000895023345947, 0.250200092792511, -0.6724192500114441, -0.26528629660606384, -0.1883201003074646, -0.07302868366241455, 0.2808036804199219, 0.3636438548564911, -0.564159095287323, 0.06010257825255394, 0.5797010064125061, 0.1581236869096756, 0.7604926228523254, -0.014482036232948303, 0.26865556836128235, -0.67970871925354, -0.05099305137991905, -0.08347433805465698, 0.5861287117004395, 0.2570870518684387, -0.556256890296936, 0.9907447099685669, 0.34517207741737366, -0.7772194743156433, -0.8456430435180664, -0.31087008118629456, -1.1212116479873657, -0.04384089633822441, 1.4518636465072632, -0.31552812457084656, -0.4630207121372223, -0.03230070322751999, -0.1833498179912567, 0.4972285330295563, -0.6505675911903381, 0.527793824672699, 0.6721755862236023, -0.36951684951782227, 0.05461912229657173, -0.7277516722679138, 0.4145170748233795, -0.026318613439798355, -0.9796652793884277, 0.07089198380708694, 0.35284703969955444, 0.48791274428367615, 0.17705614864826202, 0.7595349550247192, -0.10308071225881577, -0.15160658955574036, 0.017404207959771156, 0.26687386631965637, -0.25954559445381165, -0.06478724628686905, -0.20390500128269196, 0.06530991196632385, -0.4139673113822937, -0.5403522849082947 ]