Delete upstage
- upstage/Llama-2-70b-instruct-1024/results_2023-10-18T23-28-06.884616.json +0 -107
- upstage/Llama-2-70b-instruct/results_2023-07-31T16-38-35.808290.json +0 -1365
- upstage/Llama-2-70b-instruct/results_2023-10-17T12-48-24.237609.json +0 -107
- upstage/SOLAR-0-70b-16bit/results_2023-08-03T01-46-57.047903.json +0 -1365
- upstage/SOLAR-0-70b-16bit/results_2023-10-04T17-16-57.736703.json +0 -1367
- upstage/SOLAR-0-70b-16bit/results_2023-11-07T01-00-47.965413.json +0 -107
- upstage/SOLAR-10.7B-Instruct-v1.0/results_2023-12-13T16-15-33.088115.json +0 -1409
- upstage/SOLAR-10.7B-Instruct-v1.0/results_2023-12-13T21-02-33.929144.json +0 -1409
- upstage/SOLAR-10.7B-v1.0/results_2023-12-13T16-05-57.212237.json +0 -1409
- upstage/SOLAR-10.7B-v1.0/results_2023-12-13T16-09-54.285787.json +0 -1409
- upstage/llama-30b-instruct-2048/results_2023-07-19T12-29-43.161348.json +0 -871
- upstage/llama-30b-instruct-2048/results_2023-10-19T00-52-48.467311.json +0 -107
- upstage/llama-30b-instruct/results_2023-07-19T22-33-00.369415.json +0 -871
- upstage/llama-30b-instruct/results_2023-09-17T15-33-08.826830.json +0 -107
- upstage/llama-65b-instruct/results_2023-07-31T16-32-35.958499.json +0 -1365
- upstage/llama-65b-instruct/results_2023-08-14T23-57-10.007854.json +0 -1365
- upstage/llama-65b-instruct/results_2023-10-17T01-44-05.835561.json +0 -107
- upstage/llama-65b-instruct/results_2023-10-24T19-27-31.642045.json +0 -107
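Each deleted file is a per-run result payload as written by lighteval (note the `lighteval_sha` field in the payloads below). As a minimal sketch — assuming a local copy of one of the files listed above and nothing beyond Python's standard library — here is how such a run can be loaded and a headline metric read out; the key names are taken from the payloads shown below:

```python
import json
from pathlib import Path

# Local copy of one of the deleted result files (path as listed above).
path = Path("upstage/Llama-2-70b-instruct-1024/results_2023-10-18T23-28-06.884616.json")
run = json.loads(path.read_text())

# "results" maps "harness|<task>|<n_fewshot>" to that task's metrics.
for task, metrics in run["results"].items():
    print(task, metrics)

# For example, the 5-shot GSM8K accuracy of this run:
print(run["results"]["harness|gsm8k|5"]["acc"])  # 0.32221379833206976
```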
upstage/Llama-2-70b-instruct-1024/results_2023-10-18T23-28-06.884616.json
DELETED
@@ -1,107 +0,0 @@
```json
{
  "config_general": {
    "model_name": "upstage/Llama-2-70b-instruct-1024",
    "model_sha": "4e73109f096f1eae71b184f4581965504b2e7448",
    "model_size": "128.56 GB",
    "model_dtype": "torch.float16",
    "lighteval_sha": "0f318ecf002208468154899217b3ba7c6ae09374",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": ""
  },
  "results": {
    "harness|drop|3": { "em": 0.49989513422818793, "em_stderr": 0.005120467878578845, "f1": 0.5841736577181234, "f1_stderr": 0.004671177225967014 },
    "harness|gsm8k|5": { "acc": 0.32221379833206976, "acc_stderr": 0.01287243548118878 },
    "harness|winogrande|5": { "acc": 0.8287292817679558, "acc_stderr": 0.010588417294962526 },
    "all": { "em": 0.49989513422818793, "em_stderr": 0.005120467878578845, "f1": 0.5841736577181234, "f1_stderr": 0.004671177225967014, "acc": 0.5754715400500128, "acc_stderr": 0.011730426388075654 }
  },
  "versions": { "harness|drop|3": 1, "harness|gsm8k|5": 0, "harness|winogrande|5": 0, "all": 0 },
  "config_tasks": { "harness|drop": "LM Harness task", "harness|gsm8k": "LM Harness task", "harness|winogrande": "LM Harness task" },
  "summary_tasks": {
    "harness|drop|3": { "hashes": { "hash_examples": "1d27416e8324e9a3", "hash_full_prompts": "a5513ff9a741b385", "hash_input_tokens": "61b608e0b5ceed76", "hash_cont_tokens": "7002267c913b5ac4" }, "truncated": 1263, "non-truncated": 8273, "padded": 0, "non-padded": 9536, "effective_few_shots": 3.0, "num_truncated_few_shots": 0 },
    "harness|gsm8k|5": { "hashes": { "hash_examples": "4c0843a5d99bcfdc", "hash_full_prompts": "41d55e83abc0e02d", "hash_input_tokens": "bda342e47b5099b2", "hash_cont_tokens": "6bb254e893ddfe4a" }, "truncated": 0, "non-truncated": 1319, "padded": 0, "non-padded": 1319, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|winogrande|5": { "hashes": { "hash_examples": "aada0a176fd81218", "hash_full_prompts": "c8655cbd12de8409", "hash_input_tokens": "c0bedf98cb040854", "hash_cont_tokens": "f08975ad6f2d5864" }, "truncated": 0, "non-truncated": 2534, "padded": 2432, "non-padded": 102, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 }
  },
  "summary_general": {
    "hashes": { "hash_examples": "9b4d8993161e637d", "hash_full_prompts": "08215e527b7e60a5", "hash_input_tokens": "80afe720f936f8d2", "hash_cont_tokens": "21a6cde0a3dbfc7c" },
    "total_evaluation_time_secondes": "135750.9448003769",
    "truncated": 1263,
    "non-truncated": 12126,
    "padded": 2432,
    "non-padded": 10957,
    "num_truncated_few_shots": 0
  }
}
```
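One property worth noting about this payload: the "all" block is consistent with an unweighted mean of the per-task metrics. Averaging the two accuracy-bearing tasks above reproduces the aggregate exactly — the numbers are copied from the file, while the averaging rule itself is an inference from them, not something the file states:

```python
# Accuracies copied from the "results" block above.
gsm8k_acc = 0.32221379833206976      # harness|gsm8k|5
winogrande_acc = 0.8287292817679558  # harness|winogrande|5

# Unweighted mean reproduces the reported "all" accuracy.
print((gsm8k_acc + winogrande_acc) / 2)  # 0.5754715400500128
```

The same holds for the standard errors: (0.01287243548118878 + 0.010588417294962526) / 2 gives the reported 0.011730426388075654.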
upstage/Llama-2-70b-instruct/results_2023-07-31T16-38-35.808290.json
DELETED
@@ -1,1365 +0,0 @@
```json
{
  "results": {
    "harness|arc:challenge|25": { "acc": 0.6706484641638225, "acc_stderr": 0.013734057652635474, "acc_norm": 0.7090443686006825, "acc_norm_stderr": 0.013273077865907592 },
    "harness|hellaswag|10": { "acc": 0.6877116112328221, "acc_stderr": 0.0046247963481288006, "acc_norm": 0.8748257319259112, "acc_norm_stderr": 0.003302401106926325 },
    "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 },
    "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595852, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595852 },
    "harness|hendrycksTest-astronomy|5": { "acc": 0.8157894736842105, "acc_stderr": 0.031546980450822305, "acc_norm": 0.8157894736842105, "acc_norm_stderr": 0.031546980450822305 },
    "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909284, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909284 },
    "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7547169811320755, "acc_stderr": 0.02648035717989569, "acc_norm": 0.7547169811320755, "acc_norm_stderr": 0.02648035717989569 },
    "harness|hendrycksTest-college_biology|5": { "acc": 0.8472222222222222, "acc_stderr": 0.030085743248565663, "acc_norm": 0.8472222222222222, "acc_norm_stderr": 0.030085743248565663 },
    "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 },
    "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 },
    "harness|hendrycksTest-college_mathematics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 },
    "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 },
    "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 },
    "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 },
    "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6893617021276596, "acc_stderr": 0.03025123757921317, "acc_norm": 0.6893617021276596, "acc_norm_stderr": 0.03025123757921317 },
    "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 },
    "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 },
    "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.025680564640056882, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.025680564640056882 },
    "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 },
    "harness|hendrycksTest-global_facts|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 },
    "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8161290322580645, "acc_stderr": 0.022037217340267826, "acc_norm": 0.8161290322580645, "acc_norm_stderr": 0.022037217340267826 },
    "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5467980295566502, "acc_stderr": 0.03502544650845872, "acc_norm": 0.5467980295566502, "acc_norm_stderr": 0.03502544650845872 },
    "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 },
    "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8303030303030303, "acc_stderr": 0.029311188674983134, "acc_norm": 0.8303030303030303, "acc_norm_stderr": 0.029311188674983134 },
    "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.022390787638216766, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.022390787638216766 },
    "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.927461139896373, "acc_stderr": 0.018718998520678178, "acc_norm": 0.927461139896373, "acc_norm_stderr": 0.018718998520678178 },
    "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7128205128205128, "acc_stderr": 0.02293992541853062, "acc_norm": 0.7128205128205128, "acc_norm_stderr": 0.02293992541853062 },
    "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.02822644674968352, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.02822644674968352 },
    "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7563025210084033, "acc_stderr": 0.027886828078380558, "acc_norm": 0.7563025210084033, "acc_norm_stderr": 0.027886828078380558 },
    "harness|hendrycksTest-high_school_physics|5": { "acc": 0.45695364238410596, "acc_stderr": 0.04067325174247443, "acc_norm": 0.45695364238410596, "acc_norm_stderr": 0.04067325174247443 },
    "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8954128440366973, "acc_stderr": 0.013120530245265572, "acc_norm": 0.8954128440366973, "acc_norm_stderr": 0.013120530245265572 },
    "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6018518518518519, "acc_stderr": 0.033384734032074016, "acc_norm": 0.6018518518518519, "acc_norm_stderr": 0.033384734032074016 },
    "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9117647058823529, "acc_stderr": 0.019907399791316942, "acc_norm": 0.9117647058823529, "acc_norm_stderr": 0.019907399791316942 },
    "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.890295358649789, "acc_stderr": 0.020343400734868837, "acc_norm": 0.890295358649789, "acc_norm_stderr": 0.020343400734868837 },
    "harness|hendrycksTest-human_aging|5": { "acc": 0.7668161434977578, "acc_stderr": 0.028380391147094706, "acc_norm": 0.7668161434977578, "acc_norm_stderr": 0.028380391147094706 },
    "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8625954198473282, "acc_stderr": 0.030194823996804475, "acc_norm": 0.8625954198473282, "acc_norm_stderr": 0.030194823996804475 },
    "harness|hendrycksTest-international_law|5": { "acc": 0.8512396694214877, "acc_stderr": 0.03248470083807193, "acc_norm": 0.8512396694214877, "acc_norm_stderr": 0.03248470083807193 },
    "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 },
    "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.803680981595092, "acc_stderr": 0.031207970394709225, "acc_norm": 0.803680981595092, "acc_norm_stderr": 0.031207970394709225 },
    "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.047427623612430116, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.047427623612430116 },
    "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.0376017800602662, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.0376017800602662 },
    "harness|hendrycksTest-marketing|5": { "acc": 0.905982905982906, "acc_stderr": 0.019119892798924974, "acc_norm": 0.905982905982906, "acc_norm_stderr": 0.019119892798924974 },
    "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 },
    "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8735632183908046, "acc_stderr": 0.011884488905895538, "acc_norm": 0.8735632183908046, "acc_norm_stderr": 0.011884488905895538 },
    "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7774566473988439, "acc_stderr": 0.02239421566194282, "acc_norm": 0.7774566473988439, "acc_norm_stderr": 0.02239421566194282 },
    "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5932960893854748, "acc_stderr": 0.016428811915898858, "acc_norm": 0.5932960893854748, "acc_norm_stderr": 0.016428811915898858 },
    "harness|hendrycksTest-nutrition|5": { "acc": 0.7418300653594772, "acc_stderr": 0.025058503316958157, "acc_norm": 0.7418300653594772, "acc_norm_stderr": 0.025058503316958157 },
    "harness|hendrycksTest-philosophy|5": { "acc": 0.752411575562701, "acc_stderr": 0.024513879973621967, "acc_norm": 0.752411575562701, "acc_norm_stderr": 0.024513879973621967 },
    "harness|hendrycksTest-prehistory|5": { "acc": 0.8395061728395061, "acc_stderr": 0.02042395535477803, "acc_norm": 0.8395061728395061, "acc_norm_stderr": 0.02042395535477803 },
    "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5567375886524822, "acc_stderr": 0.02963483847376601, "acc_norm": 0.5567375886524822, "acc_norm_stderr": 0.02963483847376601 },
    "harness|hendrycksTest-professional_law|5": { "acc": 0.5573663624511083, "acc_stderr": 0.01268590653820624, "acc_norm": 0.5573663624511083, "acc_norm_stderr": 0.01268590653820624 },
    "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7058823529411765, "acc_stderr": 0.02767846864214472, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.02767846864214472 },
    "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7630718954248366, "acc_stderr": 0.017201662169789772, "acc_norm": 0.7630718954248366, "acc_norm_stderr": 0.017201662169789772 },
    "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 },
    "harness|hendrycksTest-security_studies|5": { "acc": 0.7714285714285715, "acc_stderr": 0.02688214492230774, "acc_norm": 0.7714285714285715, "acc_norm_stderr": 0.02688214492230774 },
    "harness|hendrycksTest-sociology|5": { "acc": 0.8656716417910447, "acc_stderr": 0.024112678240900798, "acc_norm": 0.8656716417910447, "acc_norm_stderr": 0.024112678240900798 },
    "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352202, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352202 },
    "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 },
    "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015575, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015575 },
    "harness|truthfulqa:mc|0": { "mc1": 0.423500611995104, "mc1_stderr": 0.01729742144853473, "mc2": 0.6096689044038274, "mc2_stderr": 0.014988258592323236 },
    "all": { "acc": 0.6973946921458888, "acc_stderr": 0.031191911192388785, "acc_norm": 0.7012168959616505, "acc_norm_stderr": 0.031161684497000137, "mc1": 0.423500611995104, "mc1_stderr": 0.01729742144853473, "mc2": 0.6096689044038274, "mc2_stderr": 0.014988258592323236 }
  },
  "versions": {
    "harness|arc:challenge|25": 0, "harness|hellaswag|10": 0,
    "harness|hendrycksTest-abstract_algebra|5": 1, "harness|hendrycksTest-anatomy|5": 1, "harness|hendrycksTest-astronomy|5": 1,
    "harness|hendrycksTest-business_ethics|5": 1, "harness|hendrycksTest-clinical_knowledge|5": 1, "harness|hendrycksTest-college_biology|5": 1,
    "harness|hendrycksTest-college_chemistry|5": 1, "harness|hendrycksTest-college_computer_science|5": 1, "harness|hendrycksTest-college_mathematics|5": 1,
    "harness|hendrycksTest-college_medicine|5": 1, "harness|hendrycksTest-college_physics|5": 1, "harness|hendrycksTest-computer_security|5": 1,
    "harness|hendrycksTest-conceptual_physics|5": 1, "harness|hendrycksTest-econometrics|5": 1, "harness|hendrycksTest-electrical_engineering|5": 1,
    "harness|hendrycksTest-elementary_mathematics|5": 1, "harness|hendrycksTest-formal_logic|5": 1, "harness|hendrycksTest-global_facts|5": 1,
    "harness|hendrycksTest-high_school_biology|5": 1, "harness|hendrycksTest-high_school_chemistry|5": 1, "harness|hendrycksTest-high_school_computer_science|5": 1,
    "harness|hendrycksTest-high_school_european_history|5": 1, "harness|hendrycksTest-high_school_geography|5": 1, "harness|hendrycksTest-high_school_government_and_politics|5": 1,
    "harness|hendrycksTest-high_school_macroeconomics|5": 1, "harness|hendrycksTest-high_school_mathematics|5": 1, "harness|hendrycksTest-high_school_microeconomics|5": 1,
    "harness|hendrycksTest-high_school_physics|5": 1, "harness|hendrycksTest-high_school_psychology|5": 1, "harness|hendrycksTest-high_school_statistics|5": 1,
    "harness|hendrycksTest-high_school_us_history|5": 1, "harness|hendrycksTest-high_school_world_history|5": 1, "harness|hendrycksTest-human_aging|5": 1,
    "harness|hendrycksTest-human_sexuality|5": 1, "harness|hendrycksTest-international_law|5": 1, "harness|hendrycksTest-jurisprudence|5": 1,
    "harness|hendrycksTest-logical_fallacies|5": 1, "harness|hendrycksTest-machine_learning|5": 1, "harness|hendrycksTest-management|5": 1,
    "harness|hendrycksTest-marketing|5": 1, "harness|hendrycksTest-medical_genetics|5": 1, "harness|hendrycksTest-miscellaneous|5": 1,
    "harness|hendrycksTest-moral_disputes|5": 1, "harness|hendrycksTest-moral_scenarios|5": 1, "harness|hendrycksTest-nutrition|5": 1,
    "harness|hendrycksTest-philosophy|5": 1, "harness|hendrycksTest-prehistory|5": 1, "harness|hendrycksTest-professional_accounting|5": 1,
    "harness|hendrycksTest-professional_law|5": 1, "harness|hendrycksTest-professional_medicine|5": 1, "harness|hendrycksTest-professional_psychology|5": 1,
    "harness|hendrycksTest-public_relations|5": 1, "harness|hendrycksTest-security_studies|5": 1, "harness|hendrycksTest-sociology|5": 1,
    "harness|hendrycksTest-us_foreign_policy|5": 1, "harness|hendrycksTest-virology|5": 1, "harness|hendrycksTest-world_religions|5": 1,
    "harness|truthfulqa:mc|0": 1, "all": 0
  },
  "config_general": {
    "model_name": "upstage/Llama-2-70b-instruct",
    "model_sha": "8469429924dc2e1a9394b8095753985668a4052e",
    "model_dtype": "torch.float16",
    "lighteval_sha": "03c2fad20ff7f5334c33cfee459024b8d7e4a109",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  },
  "config_tasks": {
    "harness|arc:challenge": "LM Harness task", "harness|hellaswag": "LM Harness task",
    "harness|hendrycksTest-abstract_algebra": "LM Harness task", "harness|hendrycksTest-anatomy": "LM Harness task",
    "harness|hendrycksTest-astronomy": "LM Harness task", "harness|hendrycksTest-business_ethics": "LM Harness task",
    "harness|hendrycksTest-clinical_knowledge": "LM Harness task", "harness|hendrycksTest-college_biology": "LM Harness task",
    "harness|hendrycksTest-college_chemistry": "LM Harness task", "harness|hendrycksTest-college_computer_science": "LM Harness task",
    "harness|hendrycksTest-college_mathematics": "LM Harness task", "harness|hendrycksTest-college_medicine": "LM Harness task",
    "harness|hendrycksTest-college_physics": "LM Harness task", "harness|hendrycksTest-computer_security": "LM Harness task",
    "harness|hendrycksTest-conceptual_physics": "LM Harness task", "harness|hendrycksTest-econometrics": "LM Harness task",
    "harness|hendrycksTest-electrical_engineering": "LM Harness task", "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
    "harness|hendrycksTest-formal_logic": "LM Harness task", "harness|hendrycksTest-global_facts": "LM Harness task",
    "harness|hendrycksTest-high_school_biology": "LM Harness task", "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
    "harness|hendrycksTest-high_school_computer_science": "LM Harness task", "harness|hendrycksTest-high_school_european_history": "LM Harness task",
    "harness|hendrycksTest-high_school_geography": "LM Harness task", "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
    "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task", "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
    "harness|hendrycksTest-high_school_microeconomics": "LM Harness task", "harness|hendrycksTest-high_school_physics": "LM Harness task",
    "harness|hendrycksTest-high_school_psychology": "LM Harness task", "harness|hendrycksTest-high_school_statistics": "LM Harness task",
    "harness|hendrycksTest-high_school_us_history": "LM Harness task", "harness|hendrycksTest-high_school_world_history": "LM Harness task",
    "harness|hendrycksTest-human_aging": "LM Harness task", "harness|hendrycksTest-human_sexuality": "LM Harness task",
    "harness|hendrycksTest-international_law": "LM Harness task", "harness|hendrycksTest-jurisprudence": "LM Harness task",
    "harness|hendrycksTest-logical_fallacies": "LM Harness task", "harness|hendrycksTest-machine_learning": "LM Harness task",
    "harness|hendrycksTest-management": "LM Harness task", "harness|hendrycksTest-marketing": "LM Harness task",
    "harness|hendrycksTest-medical_genetics": "LM Harness task", "harness|hendrycksTest-miscellaneous": "LM Harness task",
    "harness|hendrycksTest-moral_disputes": "LM Harness task", "harness|hendrycksTest-moral_scenarios": "LM Harness task",
    "harness|hendrycksTest-nutrition": "LM Harness task", "harness|hendrycksTest-philosophy": "LM Harness task",
    "harness|hendrycksTest-prehistory": "LM Harness task", "harness|hendrycksTest-professional_accounting": "LM Harness task",
    "harness|hendrycksTest-professional_law": "LM Harness task", "harness|hendrycksTest-professional_medicine": "LM Harness task",
    "harness|hendrycksTest-professional_psychology": "LM Harness task", "harness|hendrycksTest-public_relations": "LM Harness task",
    "harness|hendrycksTest-security_studies": "LM Harness task", "harness|hendrycksTest-sociology": "LM Harness task",
    "harness|hendrycksTest-us_foreign_policy": "LM Harness task", "harness|hendrycksTest-virology": "LM Harness task",
    "harness|hendrycksTest-world_religions": "LM Harness task", "harness|truthfulqa:mc": "LM Harness task"
  },
  "summary_tasks": {
    "harness|arc:challenge|25": { "hashes": { "hash_examples": "17b0cae357c0259e", "hash_full_prompts": "045cbb916e5145c6", "hash_input_tokens": "61571bf68d6d89aa", "hash_cont_tokens": "ede2b335438f08e9" }, "truncated": 0, "non-truncated": 4687, "padded": 4687, "non-padded": 0, "effective_few_shots": 25.0, "num_truncated_few_shots": 0 },
    "harness|hellaswag|10": { "hashes": { "hash_examples": "e1768ecb99d7ecf0", "hash_full_prompts": "0b4c16983130f84f", "hash_input_tokens": "29906669b1c7054a", "hash_cont_tokens": "b41cf1ad182d68d5" }, "truncated": 0, "non-truncated": 40168, "padded": 40113, "non-padded": 55, "effective_few_shots": 10.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-abstract_algebra|5": { "hashes": { "hash_examples": "280f9f325b40559a", "hash_full_prompts": "2f776a367d23aea2", "hash_input_tokens": "c54ff61ad0273dd7", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-anatomy|5": { "hashes": { "hash_examples": "2f83a4f1cab4ba18", "hash_full_prompts": "516f74bef25df620", "hash_input_tokens": "be31a1e22aef5f90", "hash_cont_tokens": "f11971a765cb609f" }, "truncated": 0, "non-truncated": 540, "padded": 540, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-astronomy|5": { "hashes": { "hash_examples": "7d587b908da4d762", "hash_full_prompts": "faf4e80f65de93ca", "hash_input_tokens": "277a7b1fad566940", "hash_cont_tokens": "238bd86950544b29" }, "truncated": 0, "non-truncated": 608, "padded": 608, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-business_ethics|5": { "hashes": { "hash_examples": "33e51740670de686", "hash_full_prompts": "db01c3ef8e1479d4", "hash_input_tokens": "ba552605bc116de5", "hash_cont_tokens": "f9d6d2a7d7e9a041" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-clinical_knowledge|5": { "hashes": { "hash_examples": "f3366dbe7eefffa4", "hash_full_prompts": "49654f71d94b65c3", "hash_input_tokens": "428c7563d0b98ab9", "hash_cont_tokens": "6af58623d0d5fbcd" }, "truncated": 0, "non-truncated": 1060, "padded": 1060, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-college_biology|5": { "hashes": { "hash_examples": "ca2b6753a0193e7f", "hash_full_prompts": "2b460b75f1fdfefd", "hash_input_tokens": "da036601573942e2", "hash_cont_tokens": "875cde3af7a0ee14" }, "truncated": 0, "non-truncated": 576, "padded": 576, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-college_chemistry|5": { "hashes": { "hash_examples": "22ff85f1d34f42d1", "hash_full_prompts": "242c9be6da583e95", "hash_input_tokens": "94e0196d6aded13d", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-college_computer_science|5": { "hashes": { "hash_examples": "30318289d717a5cf", "hash_full_prompts": "ed2bdb4e87c4b371", "hash_input_tokens": "6e4d0f4a8d36690b", "hash_cont_tokens": "1ba0c71186b1505e" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-college_mathematics|5": { "hashes": { "hash_examples": "4944d1f0b6b5d911", "hash_full_prompts": "770bc4281c973190", "hash_input_tokens": "614054d17109a25d", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-college_medicine|5": { "hashes": { "hash_examples": "dd69cc33381275af", "hash_full_prompts": "ad2a53e5250ab46e", "hash_input_tokens": "1d633b3cc0524ba8", "hash_cont_tokens": "702fb6d82ff0d6ac" }, "truncated": 0, "non-truncated": 692, "padded": 692, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-college_physics|5": { "hashes": { "hash_examples": "875dd26d22655b0d", "hash_full_prompts": "833a0d7b55aed500", "hash_input_tokens": "5421d9a1af86cbd4", "hash_cont_tokens": "f7b8097afc16a47c" }, "truncated": 0, "non-truncated": 408, "padded": 408, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-computer_security|5": { "hashes": { "hash_examples": "006451eedc0ededb", "hash_full_prompts": "94034c97e85d8f46", "hash_input_tokens": "5e6b70ecb333cf18", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-conceptual_physics|5": { "hashes": { "hash_examples": "8874ece872d2ca4c", "hash_full_prompts": "e40d15a34640d6fa", "hash_input_tokens": "c2ef11a87264ceed", "hash_cont_tokens": "aa0e8bc655f2f641" }, "truncated": 0, "non-truncated": 940, "padded": 940, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-econometrics|5": { "hashes": { "hash_examples": "64d3623b0bfaa43f", "hash_full_prompts": "612f340fae41338d", "hash_input_tokens": "ecaccd912a4c3978", "hash_cont_tokens": "a9b1f761089f6acc" }, "truncated": 0, "non-truncated": 456, "padded": 456, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-electrical_engineering|5": { "hashes": { "hash_examples": "e98f51780c674d7e", "hash_full_prompts": "10275b312d812ae6", "hash_input_tokens": "1590c84291399be8", "hash_cont_tokens": "2425a3f084a591ef" }, "truncated": 0, "non-truncated": 580, "padded": 580, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-elementary_mathematics|5": { "hashes": { "hash_examples": "fc48208a5ac1c0ce", "hash_full_prompts": "5ec274c6c82aca23", "hash_input_tokens": "3269597f715b0da1", "hash_cont_tokens": "eb2d5002052b5bc5" }, "truncated": 0, "non-truncated": 1512, "padded": 1512, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-formal_logic|5": { "hashes": { "hash_examples": "5a6525665f63ea72", "hash_full_prompts": "07b92638c4a6b500", "hash_input_tokens": "a2800d20f3ab8d7c", "hash_cont_tokens": "9b30dc19c9b62f60" }, "truncated": 0, "non-truncated": 504, "padded": 504, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-global_facts|5": { "hashes": { "hash_examples": "371d70d743b2b89b", "hash_full_prompts": "332fdee50a1921b4", "hash_input_tokens": "94ed44b3772505ad", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_biology|5": { "hashes": { "hash_examples": "a79e1018b1674052", "hash_full_prompts": "e624e26ede922561", "hash_input_tokens": "24423acb928db768", "hash_cont_tokens": "74217a4e2868536f" }, "truncated": 0, "non-truncated": 1240, "padded": 1240, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_chemistry|5": { "hashes": { "hash_examples": "44bfc25c389f0e03", "hash_full_prompts": "0e3e5f5d9246482a", "hash_input_tokens": "831ff35c474e5cef", "hash_cont_tokens": "bf39544be0ebf000" }, "truncated": 0, "non-truncated": 812, "padded": 812, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_computer_science|5": { "hashes": { "hash_examples": "8b8cdb1084f24169", "hash_full_prompts": "c00487e67c1813cc", "hash_input_tokens": "8c34e0f2bda77358", "hash_cont_tokens": "43570b3948564b64" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_european_history|5": { "hashes": { "hash_examples": "11cd32d0ef440171", "hash_full_prompts": "318f4513c537c6bf", "hash_input_tokens": "f1f73dd687da18d7", "hash_cont_tokens": "674fc454bdc5ac93" }, "truncated": 660, "non-truncated": 0, "padded": 0, "non-padded": 660, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_geography|5": { "hashes": { "hash_examples": "b60019b9e80b642f", "hash_full_prompts": "ee5789fcc1a81b1e", "hash_input_tokens": "7c5547c7da5bc793", "hash_cont_tokens": "03a5012b916274ea" }, "truncated": 0, "non-truncated": 792, "padded": 792, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_government_and_politics|5": { "hashes": { "hash_examples": "d221ec983d143dc3", "hash_full_prompts": "ac42d888e1ce1155", "hash_input_tokens": "f62991cb6a496b05", "hash_cont_tokens": "50ab225c2f535210" }, "truncated": 0, "non-truncated": 772, "padded": 772, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_macroeconomics|5": { "hashes": { "hash_examples": "59c2915cacfd3fbb", "hash_full_prompts": "c6bd9d25158abd0e", "hash_input_tokens": "4cef2aff6e3d59ed", "hash_cont_tokens": "c583432ad27fcfe0" }, "truncated": 0, "non-truncated": 1560, "padded": 1560, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_mathematics|5": { "hashes": { "hash_examples": "1f8ac897608de342", "hash_full_prompts": "5d88f41fc2d643a8", "hash_input_tokens": "6e2577ea4082ed2b", "hash_cont_tokens": "1194078d4e38c984" }, "truncated": 0, "non-truncated": 1080, "padded": 1080, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_microeconomics|5": { "hashes": { "hash_examples": "ead6a0f2f6c83370", "hash_full_prompts": "bfc393381298609e", "hash_input_tokens": "c5fc9aeb1079c8e4", "hash_cont_tokens": "f47f041de50333b9" }, "truncated": 0, "non-truncated": 952, "padded": 952, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_physics|5": { "hashes": { "hash_examples": "c3f2025990afec64", "hash_full_prompts": "fc78b4997e436734", "hash_input_tokens": "555fc385cffa84ca", "hash_cont_tokens": "6296151cf7fee15c" }, "truncated": 0, "non-truncated": 604, "padded": 604, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_psychology|5": { "hashes": { "hash_examples": "21f8aab618f6d636", "hash_full_prompts": "d5c76aa40b9dbc43", "hash_input_tokens": "febd23cbf9973b7f", "hash_cont_tokens": "a490d3db0ea5935a" }, "truncated": 0, "non-truncated": 2180, "padded": 2180, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_statistics|5": { "hashes": { "hash_examples": "2386a60a11fc5de3", "hash_full_prompts": "4c5c8be5aafac432", "hash_input_tokens": "424b02981230ee83", "hash_cont_tokens": "6830ef7d0325d7ef" }, "truncated": 0, "non-truncated": 864, "padded": 864, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_us_history|5": { "hashes": { "hash_examples": "74961543be40f04f", "hash_full_prompts": "5d5ca4840131ba21", "hash_input_tokens": "50c9ff438c85a69e", "hash_cont_tokens": "cdd0b3dc06d933e5" }, "truncated": 816, "non-truncated": 0, "padded": 0, "non-padded": 816, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-high_school_world_history|5": { "hashes": { "hash_examples": "2ad2f6b7198b2234", "hash_full_prompts": "11845057459afd72", "hash_input_tokens": "054824cc474caef5", "hash_cont_tokens": "e0203e3fc1bb0500" }, "truncated": 8, "non-truncated": 940, "padded": 940, "non-padded": 8, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-human_aging|5": { "hashes": { "hash_examples": "1a7199dc733e779b", "hash_full_prompts": "756b9096b8eaf892", "hash_input_tokens": "541a75f071dcf579", "hash_cont_tokens": "142a4a8a1138a214" }, "truncated": 0, "non-truncated": 892, "padded": 892, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-human_sexuality|5": { "hashes": { "hash_examples": "7acb8fdad97f88a6", "hash_full_prompts": "731a52ff15b8cfdb", "hash_input_tokens": "04269e5c5a257dd9", "hash_cont_tokens": "bc54813e809b796d" }, "truncated": 0, "non-truncated": 524, "padded": 524, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-international_law|5": { "hashes": { "hash_examples": "1300bfd0dfc59114", "hash_full_prompts": "db2aefbff5eec996", "hash_input_tokens": "d93ba9d9d38e4397", "hash_cont_tokens": "63435df622d5437b" }, "truncated": 0, "non-truncated": 484, "padded": 484, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-jurisprudence|5": { "hashes": { "hash_examples": "083b1e4904c48dc2", "hash_full_prompts": "0f89ee3fe03d6a21", "hash_input_tokens": "9eeaccd2698b4f5a", "hash_cont_tokens": "e3a8cd951b6e3469" }, "truncated": 0, "non-truncated": 432, "padded": 432, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-logical_fallacies|5": { "hashes": { "hash_examples": "709128f9926a634c", "hash_full_prompts": "98a04b1f8f841069", "hash_input_tokens": "b4f08f544f2b7576", "hash_cont_tokens": "5e6ee2ff0404f23c" }, "truncated": 0, "non-truncated": 652, "padded": 648, "non-padded": 4, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-machine_learning|5": { "hashes": { "hash_examples": "88f22a636029ae47", "hash_full_prompts": "2e1c8d4b1e0cc921", "hash_input_tokens": "900c2a51f1174b9f", "hash_cont_tokens": "c81919424db3b267" }, "truncated": 0, "non-truncated": 448, "padded": 448, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-management|5": { "hashes": { "hash_examples": "8c8a1e07a2151dca", "hash_full_prompts": "f51611f514b265b0", "hash_input_tokens": "6b36efb4689c6eca", "hash_cont_tokens": "a01d6d39a83c4597" }, "truncated": 0, "non-truncated": 412, "padded": 412, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-marketing|5": { "hashes": { "hash_examples": "2668953431f91e96", "hash_full_prompts": "77562bef997c7650", "hash_input_tokens": "2aaac78a0cfed47a", "hash_cont_tokens": "6aeaed4d823c98aa" }, "truncated": 0, "non-truncated": 936, "padded": 936, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-medical_genetics|5": { "hashes": { "hash_examples": "9c2dda34a2ea4fd2", "hash_full_prompts": "202139046daa118f", "hash_input_tokens": "886ca823b41c094a", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-miscellaneous|5": { "hashes": { "hash_examples": "41adb694024809c2", "hash_full_prompts": "bffec9fc237bcf93", "hash_input_tokens": "72fd71de7675e7d0", "hash_cont_tokens": "9b0ab02a64603081" }, "truncated": 0, "non-truncated": 3132, "padded": 3132, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-moral_disputes|5": { "hashes": { "hash_examples": "3171c13ba3c594c4", "hash_full_prompts": "170831fc36f1d59e", "hash_input_tokens": "f3ca0dd8e7a1eb09", "hash_cont_tokens": "3b8bbe9108e55ce9" }, "truncated": 0, "non-truncated": 1384, "padded": 1354, "non-padded": 30, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-moral_scenarios|5": { "hashes": { "hash_examples": "9873e077e83e0546", "hash_full_prompts": "08f4ceba3131a068", "hash_input_tokens": "3e793631e951f23c", "hash_cont_tokens": "2eae753a177d5460" }, "truncated": 0, "non-truncated": 3580, "padded": 3580, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-nutrition|5": { "hashes": { "hash_examples": "7db1d8142ec14323", "hash_full_prompts": "4c0e68e3586cb453", "hash_input_tokens": "59753c2144ea93af", "hash_cont_tokens": "29771089bd3c65c6" }, "truncated": 0, "non-truncated": 1224, "padded": 1224, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-philosophy|5": { "hashes": { "hash_examples": "9b455b7d72811cc8", "hash_full_prompts": "e467f822d8a0d3ff", "hash_input_tokens": "bd8d3dbed15a8c34", "hash_cont_tokens": "9f6ff69d23a48783" }, "truncated": 0, "non-truncated": 1244, "padded": 1244, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-prehistory|5": { "hashes": { "hash_examples": "8be90d0f538f1560", "hash_full_prompts": "152187949bcd0921", "hash_input_tokens": "3573cd87facbb7c5", "hash_cont_tokens": "a789a13af22308bf" }, "truncated": 0, "non-truncated": 1296, "padded": 1296, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-professional_accounting|5": { "hashes": { "hash_examples": "8d377597916cd07e", "hash_full_prompts": "0eb7345d6144ee0d", "hash_input_tokens": "17e721bc1a7cbb47", "hash_cont_tokens": "5129a9cfb30c5239" }, "truncated": 0, "non-truncated": 1128, "padded": 1128, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-professional_law|5": { "hashes": { "hash_examples": "cd9dbc52b3c932d6", "hash_full_prompts": "36ac764272bfb182", "hash_input_tokens": "9178e10bd0763ec4", "hash_cont_tokens": "2e590029ef41fbcd" }, "truncated": 604, "non-truncated": 5532, "padded": 5524, "non-padded": 612, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-professional_medicine|5": { "hashes": { "hash_examples": "b20e4e816c1e383e", "hash_full_prompts": "7b8d69ea2acaf2f7", "hash_input_tokens": "f5a22012a54f70ea", "hash_cont_tokens": "cd82e108370cece8" }, "truncated": 0, "non-truncated": 1088, "padded": 1088, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-professional_psychology|5": { "hashes": { "hash_examples": "d45b73b22f9cc039", "hash_full_prompts": "fe8937e9ffc99771", "hash_input_tokens": "0dfb73a8eb3f692c", "hash_cont_tokens": "61ef0c8a87f9c92d" }, "truncated": 0, "non-truncated": 2448, "padded": 2448, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-public_relations|5": { "hashes": { "hash_examples": "0d25072e1761652a", "hash_full_prompts": "f9adc39cfa9f42ba", "hash_input_tokens": "1710c6ba4c9f3cbd", "hash_cont_tokens": "568f585a259965c1" }, "truncated": 0, "non-truncated": 440, "padded": 440, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-security_studies|5": { "hashes": { "hash_examples": "62bb8197e63d60d4", "hash_full_prompts": "869c9c3ae196b7c3", "hash_input_tokens": "d49711415961ced7", "hash_cont_tokens": "d70cfe096d4fb7bd" }, "truncated": 0, "non-truncated": 980, "padded": 980, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-sociology|5": { "hashes": { "hash_examples": "e7959df87dea8672", "hash_full_prompts": "1a1fc00e17b3a52a", "hash_input_tokens": "828999f7624cbe7e", "hash_cont_tokens": "c3a3bdfd177eed5b" }, "truncated": 0, "non-truncated": 804, "padded": 804, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-us_foreign_policy|5": { "hashes": { "hash_examples": "4a56a01ddca44dca", "hash_full_prompts": "0c7a7081c71c07b6", "hash_input_tokens": "42054621e718dbee", "hash_cont_tokens": "2568d0e8e36fa959" }, "truncated": 0, "non-truncated": 400, "padded": 400, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-virology|5": { "hashes": { "hash_examples": "451cc86a8c4f4fe9", "hash_full_prompts": "01e95325d8b738e4", "hash_input_tokens": "6c4f0aa4dc859c04", "hash_cont_tokens": "c178cccd753d9bc5" }, "truncated": 0, "non-truncated": 664, "padded": 664, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|hendrycksTest-world_religions|5": { "hashes": { "hash_examples": "3b29cfaf1a81c379", "hash_full_prompts": "e0d79a15083dfdff", "hash_input_tokens": "6c75d44e092ff24f", "hash_cont_tokens": "0a3a3ea5ef49d19c" }, "truncated": 0, "non-truncated": 684, "padded": 684, "non-padded": 0, "effective_few_shots": 5.0, "num_truncated_few_shots": 0 },
    "harness|truthfulqa:mc|0": { "hashes": { "hash_examples": "23176c0531c7b867", "hash_full_prompts": "36a6d90e75d92d4a", "hash_input_tokens": "2738d7ed7075faa7", "hash_cont_tokens": "6d1691881e252df0" }, "truncated": 0, "non-truncated": 9996, "padded": 9996, "non-padded": 0, "effective_few_shots": 0.0, "num_truncated_few_shots": 0 }
  },
  "summary_general": {
    "hashes": { "hash_examples": "d84d18e9a963753d", "hash_full_prompts": "12b540783521a8e6", "hash_input_tokens": "6fecf578c508db6a", "hash_cont_tokens": "f4b7b7f3a2788768" },
    "total_evaluation_time_secondes": "26378.668837547302",
    "truncated": 2088,
    "non-truncated": 108931,
    "padded": 108834,
    "non-padded": 2185,
    "num_truncated_few_shots": 0
  }
}
```
upstage/Llama-2-70b-instruct/results_2023-10-17T12-48-24.237609.json
DELETED
@@ -1,107 +0,0 @@
{
"config_general": {
"model_name": "upstage/Llama-2-70b-instruct",
"model_sha": "4e73109f096f1eae71b184f4581965504b2e7448",
"model_size": "128.56 GB",
"model_dtype": "torch.float16",
"lighteval_sha": "0f318ecf002208468154899217b3ba7c6ae09374",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": ""
},
"results": {
"harness|drop|3": {
"em": 0.49989513422818793,
"em_stderr": 0.005120467878578845,
"f1": 0.5841736577181234,
"f1_stderr": 0.004671177225967014
},
"harness|gsm8k|5": {
"acc": 0.32221379833206976,
"acc_stderr": 0.01287243548118878
},
"harness|winogrande|5": {
"acc": 0.8287292817679558,
"acc_stderr": 0.010588417294962526
},
"all": {
"em": 0.49989513422818793,
"em_stderr": 0.005120467878578845,
"f1": 0.5841736577181234,
"f1_stderr": 0.004671177225967014,
"acc": 0.5754715400500128,
"acc_stderr": 0.011730426388075654
}
},
"versions": {
"harness|drop|3": 1,
"harness|gsm8k|5": 0,
"harness|winogrande|5": 0,
"all": 0
},
"config_tasks": {
"harness|drop": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|winogrande": "LM Harness task"
},
"summary_tasks": {
"harness|drop|3": {
"hashes": {
"hash_examples": "1d27416e8324e9a3",
"hash_full_prompts": "a5513ff9a741b385",
"hash_input_tokens": "61b608e0b5ceed76",
"hash_cont_tokens": "7002267c913b5ac4"
},
"truncated": 1263,
"non-truncated": 8273,
"padded": 0,
"non-padded": 9536,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "bda342e47b5099b2",
"hash_cont_tokens": "6bb254e893ddfe4a"
},
"truncated": 0,
"non-truncated": 1319,
"padded": 0,
"non-padded": 1319,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "c0bedf98cb040854",
"hash_cont_tokens": "f08975ad6f2d5864"
},
"truncated": 0,
"non-truncated": 2534,
"padded": 2432,
"non-padded": 102,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "9b4d8993161e637d",
"hash_full_prompts": "08215e527b7e60a5",
"hash_input_tokens": "80afe720f936f8d2",
"hash_cont_tokens": "21a6cde0a3dbfc7c"
},
"total_evaluation_time_secondes": "135319.24827551842",
"truncated": 1263,
"non-truncated": 12126,
"padded": 2432,
"non-padded": 10957,
"num_truncated_few_shots": 0
}
}
upstage/SOLAR-0-70b-16bit/results_2023-08-03T01-46-57.047903.json
DELETED
@@ -1,1365 +0,0 @@
{
"results": {
"harness|arc:challenge|25": {
"acc": 0.6732081911262798,
"acc_stderr": 0.013706665975587333,
"acc_norm": 0.7107508532423208,
"acc_norm_stderr": 0.013250012579393441
},
"harness|hellaswag|10": {
"acc": 0.6974706233817964,
"acc_stderr": 0.00458414401465495,
"acc_norm": 0.8789085839474209,
"acc_norm_stderr": 0.0032556675321152857
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.8421052631578947,
"acc_stderr": 0.029674167520101453,
"acc_norm": 0.8421052631578947,
"acc_norm_stderr": 0.029674167520101453
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.74,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.74,
"acc_norm_stderr": 0.044084400227680794
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7320754716981132,
"acc_stderr": 0.027257260322494845,
"acc_norm": 0.7320754716981132,
"acc_norm_stderr": 0.027257260322494845
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8402777777777778,
"acc_stderr": 0.030635578972093274,
"acc_norm": 0.8402777777777778,
"acc_norm_stderr": 0.030635578972093274
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6589595375722543,
"acc_stderr": 0.036146654241808254,
"acc_norm": 0.6589595375722543,
"acc_norm_stderr": 0.036146654241808254
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.04913595201274498,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.04913595201274498
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816507,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816507
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7063829787234043,
"acc_stderr": 0.029771642712491227,
"acc_norm": 0.7063829787234043,
"acc_norm_stderr": 0.029771642712491227
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4649122807017544,
"acc_stderr": 0.04692008381368909,
"acc_norm": 0.4649122807017544,
"acc_norm_stderr": 0.04692008381368909
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6482758620689655,
"acc_stderr": 0.0397923663749741,
"acc_norm": 0.6482758620689655,
"acc_norm_stderr": 0.0397923663749741
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.025699352832131792,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.025699352832131792
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.04463112720677173,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.04463112720677173
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8096774193548387,
"acc_stderr": 0.02233170761182307,
"acc_norm": 0.8096774193548387,
"acc_norm_stderr": 0.02233170761182307
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5615763546798029,
"acc_stderr": 0.03491207857486519,
"acc_norm": 0.5615763546798029,
"acc_norm_stderr": 0.03491207857486519
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8424242424242424,
"acc_stderr": 0.02845038880528436,
"acc_norm": 0.8424242424242424,
"acc_norm_stderr": 0.02845038880528436
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8737373737373737,
"acc_stderr": 0.023664359402880242,
"acc_norm": 0.8737373737373737,
"acc_norm_stderr": 0.023664359402880242
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9378238341968912,
"acc_stderr": 0.017426974154240528,
"acc_norm": 0.9378238341968912,
"acc_norm_stderr": 0.017426974154240528
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.7102564102564103,
"acc_stderr": 0.023000628243687968,
"acc_norm": 0.7102564102564103,
"acc_norm_stderr": 0.023000628243687968
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.31851851851851853,
"acc_stderr": 0.028406533090608463,
"acc_norm": 0.31851851851851853,
"acc_norm_stderr": 0.028406533090608463
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7647058823529411,
"acc_stderr": 0.02755361446786381,
"acc_norm": 0.7647058823529411,
"acc_norm_stderr": 0.02755361446786381
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.47019867549668876,
"acc_stderr": 0.04075224992216979,
"acc_norm": 0.47019867549668876,
"acc_norm_stderr": 0.04075224992216979
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9027522935779817,
"acc_stderr": 0.012703533408540366,
"acc_norm": 0.9027522935779817,
"acc_norm_stderr": 0.012703533408540366
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6018518518518519,
"acc_stderr": 0.033384734032074016,
"acc_norm": 0.6018518518518519,
"acc_norm_stderr": 0.033384734032074016
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9264705882352942,
"acc_stderr": 0.01831885585008968,
"acc_norm": 0.9264705882352942,
"acc_norm_stderr": 0.01831885585008968
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8945147679324894,
"acc_stderr": 0.01999556072375854,
"acc_norm": 0.8945147679324894,
"acc_norm_stderr": 0.01999556072375854
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7937219730941704,
"acc_stderr": 0.02715715047956382,
"acc_norm": 0.7937219730941704,
"acc_norm_stderr": 0.02715715047956382
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8625954198473282,
"acc_stderr": 0.030194823996804475,
"acc_norm": 0.8625954198473282,
"acc_norm_stderr": 0.030194823996804475
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.859504132231405,
"acc_stderr": 0.03172233426002157,
"acc_norm": 0.859504132231405,
"acc_norm_stderr": 0.03172233426002157
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8240740740740741,
"acc_stderr": 0.036809181416738807,
"acc_norm": 0.8240740740740741,
"acc_norm_stderr": 0.036809181416738807
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.803680981595092,
"acc_stderr": 0.031207970394709218,
"acc_norm": 0.803680981595092,
"acc_norm_stderr": 0.031207970394709218
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5089285714285714,
"acc_stderr": 0.04745033255489122,
"acc_norm": 0.5089285714285714,
"acc_norm_stderr": 0.04745033255489122
},
"harness|hendrycksTest-management|5": {
"acc": 0.8252427184466019,
"acc_stderr": 0.0376017800602662,
"acc_norm": 0.8252427184466019,
"acc_norm_stderr": 0.0376017800602662
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9017094017094017,
"acc_stderr": 0.019503444900757567,
"acc_norm": 0.9017094017094017,
"acc_norm_stderr": 0.019503444900757567
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.71,
"acc_norm_stderr": 0.04560480215720684
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8684546615581098,
"acc_stderr": 0.01208670521425043,
"acc_norm": 0.8684546615581098,
"acc_norm_stderr": 0.01208670521425043
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7803468208092486,
"acc_stderr": 0.022289638852617893,
"acc_norm": 0.7803468208092486,
"acc_norm_stderr": 0.022289638852617893
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.6044692737430167,
"acc_stderr": 0.01635341541007577,
"acc_norm": 0.6044692737430167,
"acc_norm_stderr": 0.01635341541007577
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7679738562091504,
"acc_stderr": 0.024170840879340873,
"acc_norm": 0.7679738562091504,
"acc_norm_stderr": 0.024170840879340873
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7781350482315113,
"acc_stderr": 0.02359885829286305,
"acc_norm": 0.7781350482315113,
"acc_norm_stderr": 0.02359885829286305
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8333333333333334,
"acc_stderr": 0.020736358408060006,
"acc_norm": 0.8333333333333334,
"acc_norm_stderr": 0.020736358408060006
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.574468085106383,
"acc_stderr": 0.029494827600144366,
"acc_norm": 0.574468085106383,
"acc_norm_stderr": 0.029494827600144366
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.5521512385919165,
"acc_stderr": 0.012700582404768235,
"acc_norm": 0.5521512385919165,
"acc_norm_stderr": 0.012700582404768235
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.7389705882352942,
"acc_stderr": 0.02667925227010314,
"acc_norm": 0.7389705882352942,
"acc_norm_stderr": 0.02667925227010314
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.7647058823529411,
"acc_stderr": 0.01716058723504635,
"acc_norm": 0.7647058823529411,
"acc_norm_stderr": 0.01716058723504635
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7454545454545455,
"acc_stderr": 0.041723430387053825,
"acc_norm": 0.7454545454545455,
"acc_norm_stderr": 0.041723430387053825
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8204081632653061,
"acc_stderr": 0.024573293589585637,
"acc_norm": 0.8204081632653061,
"acc_norm_stderr": 0.024573293589585637
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8756218905472637,
"acc_stderr": 0.023335401790166327,
"acc_norm": 0.8756218905472637,
"acc_norm_stderr": 0.023335401790166327
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.9,
"acc_stderr": 0.030151134457776334,
"acc_norm": 0.9,
"acc_norm_stderr": 0.030151134457776334
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5301204819277109,
"acc_stderr": 0.03885425420866767,
"acc_norm": 0.5301204819277109,
"acc_norm_stderr": 0.03885425420866767
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8771929824561403,
"acc_stderr": 0.02517298435015575,
"acc_norm": 0.8771929824561403,
"acc_norm_stderr": 0.02517298435015575
},
"harness|truthfulqa:mc|0": {
"mc1": 0.44430844553243576,
"mc1_stderr": 0.017394586250743173,
"mc2": 0.6224972679005382,
"mc2_stderr": 0.014880875055625352
},
"all": {
"acc": 0.7050740464217434,
"acc_stderr": 0.03085018588043536,
"acc_norm": 0.7087855823993987,
"acc_norm_stderr": 0.03081992944181276,
"mc1": 0.44430844553243576,
"mc1_stderr": 0.017394586250743173,
"mc2": 0.6224972679005382,
"mc2_stderr": 0.014880875055625352
}
},
"versions": {
"harness|arc:challenge|25": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"all": 0
},
"config_general": {
"model_name": "upstage/SOLAR-0-70b-16bit",
"model_sha": "5f9c77b2c0397cf83d2f97740483f107c7109e8c",
"model_dtype": "torch.float16",
"lighteval_sha": "03c2fad20ff7f5334c33cfee459024b8d7e4a109",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
},
"config_tasks": {
"harness|arc:challenge": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task"
},
"summary_tasks": {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "3722289b79076c44",
"hash_cont_tokens": "ede2b335438f08e9"
},
"truncated": 0,
"non-truncated": 4687,
"padded": 4687,
"non-padded": 0,
"effective_few_shots": 25.0,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "ececd684171f1ef2",
"hash_cont_tokens": "b41cf1ad182d68d5"
},
"truncated": 0,
"non-truncated": 40168,
"padded": 40113,
"non-padded": 55,
"effective_few_shots": 10.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "c54ff61ad0273dd7",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "be31a1e22aef5f90",
"hash_cont_tokens": "f11971a765cb609f"
},
"truncated": 0,
"non-truncated": 540,
"padded": 540,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "277a7b1fad566940",
"hash_cont_tokens": "238bd86950544b29"
},
"truncated": 0,
"non-truncated": 608,
"padded": 608,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "ba552605bc116de5",
"hash_cont_tokens": "f9d6d2a7d7e9a041"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "428c7563d0b98ab9",
"hash_cont_tokens": "6af58623d0d5fbcd"
},
"truncated": 0,
"non-truncated": 1060,
"padded": 1060,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "da036601573942e2",
"hash_cont_tokens": "875cde3af7a0ee14"
},
"truncated": 0,
"non-truncated": 576,
"padded": 576,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "94e0196d6aded13d",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "6e4d0f4a8d36690b",
"hash_cont_tokens": "1ba0c71186b1505e"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "614054d17109a25d",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "081bb2b524defd1c",
"hash_cont_tokens": "702fb6d82ff0d6ac"
},
"truncated": 0,
"non-truncated": 692,
"padded": 692,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "5421d9a1af86cbd4",
"hash_cont_tokens": "f7b8097afc16a47c"
},
"truncated": 0,
"non-truncated": 408,
"padded": 408,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "5e6b70ecb333cf18",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "c2ef11a87264ceed",
"hash_cont_tokens": "aa0e8bc655f2f641"
},
"truncated": 0,
"non-truncated": 940,
"padded": 940,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "ecaccd912a4c3978",
"hash_cont_tokens": "a9b1f761089f6acc"
},
"truncated": 0,
"non-truncated": 456,
"padded": 456,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "1590c84291399be8",
"hash_cont_tokens": "2425a3f084a591ef"
},
"truncated": 0,
"non-truncated": 580,
"padded": 580,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "3269597f715b0da1",
"hash_cont_tokens": "eb2d5002052b5bc5"
},
"truncated": 0,
"non-truncated": 1512,
"padded": 1512,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "a2800d20f3ab8d7c",
"hash_cont_tokens": "9b30dc19c9b62f60"
},
"truncated": 0,
"non-truncated": 504,
"padded": 504,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "94ed44b3772505ad",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "24423acb928db768",
"hash_cont_tokens": "74217a4e2868536f"
},
"truncated": 0,
"non-truncated": 1240,
"padded": 1240,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "831ff35c474e5cef",
"hash_cont_tokens": "bf39544be0ebf000"
},
"truncated": 0,
"non-truncated": 812,
"padded": 812,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "a20a96b44dcc5b30",
"hash_cont_tokens": "43570b3948564b64"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "5002f4ac8b1562ca",
"hash_cont_tokens": "674fc454bdc5ac93"
},
"truncated": 0,
"non-truncated": 660,
"padded": 656,
"non-padded": 4,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "7c5547c7da5bc793",
"hash_cont_tokens": "03a5012b916274ea"
},
"truncated": 0,
"non-truncated": 792,
"padded": 792,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "f62991cb6a496b05",
"hash_cont_tokens": "50ab225c2f535210"
},
"truncated": 0,
"non-truncated": 772,
"padded": 772,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "4cef2aff6e3d59ed",
"hash_cont_tokens": "c583432ad27fcfe0"
},
"truncated": 0,
"non-truncated": 1560,
"padded": 1560,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "6e2577ea4082ed2b",
"hash_cont_tokens": "1194078d4e38c984"
},
"truncated": 0,
"non-truncated": 1080,
"padded": 1080,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "c5fc9aeb1079c8e4",
"hash_cont_tokens": "f47f041de50333b9"
},
"truncated": 0,
"non-truncated": 952,
"padded": 952,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "555fc385cffa84ca",
"hash_cont_tokens": "6296151cf7fee15c"
},
"truncated": 0,
"non-truncated": 604,
"padded": 604,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "febd23cbf9973b7f",
"hash_cont_tokens": "a490d3db0ea5935a"
},
"truncated": 0,
"non-truncated": 2180,
"padded": 2180,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "400e55b56ee6fbd7",
"hash_cont_tokens": "6830ef7d0325d7ef"
},
"truncated": 0,
"non-truncated": 864,
"padded": 864,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "c639cce12a46ebad",
"hash_cont_tokens": "cdd0b3dc06d933e5"
},
"truncated": 0,
"non-truncated": 816,
"padded": 816,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "b9762065cce6f3a6",
"hash_cont_tokens": "e0203e3fc1bb0500"
},
"truncated": 0,
"non-truncated": 948,
"padded": 948,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "541a75f071dcf579",
"hash_cont_tokens": "142a4a8a1138a214"
},
"truncated": 0,
"non-truncated": 892,
"padded": 892,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "04269e5c5a257dd9",
"hash_cont_tokens": "bc54813e809b796d"
},
"truncated": 0,
"non-truncated": 524,
"padded": 524,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "d93ba9d9d38e4397",
"hash_cont_tokens": "63435df622d5437b"
},
"truncated": 0,
"non-truncated": 484,
"padded": 484,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "9eeaccd2698b4f5a",
"hash_cont_tokens": "e3a8cd951b6e3469"
},
"truncated": 0,
"non-truncated": 432,
"padded": 432,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "b4f08f544f2b7576",
"hash_cont_tokens": "5e6ee2ff0404f23c"
},
"truncated": 0,
"non-truncated": 652,
"padded": 648,
"non-padded": 4,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "900c2a51f1174b9f",
"hash_cont_tokens": "c81919424db3b267"
},
"truncated": 0,
"non-truncated": 448,
"padded": 448,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "6b36efb4689c6eca",
"hash_cont_tokens": "a01d6d39a83c4597"
},
"truncated": 0,
"non-truncated": 412,
"padded": 412,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "2aaac78a0cfed47a",
"hash_cont_tokens": "6aeaed4d823c98aa"
},
"truncated": 0,
"non-truncated": 936,
"padded": 936,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "886ca823b41c094a",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non-truncated": 400,
"padded": 400,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "72fd71de7675e7d0",
"hash_cont_tokens": "9b0ab02a64603081"
},
"truncated": 0,
"non-truncated": 3132,
"padded": 3132,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "f3ca0dd8e7a1eb09",
"hash_cont_tokens": "3b8bbe9108e55ce9"
},
"truncated": 0,
"non-truncated": 1384,
"padded": 1354,
"non-padded": 30,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "3e793631e951f23c",
"hash_cont_tokens": "2eae753a177d5460"
},
"truncated": 0,
"non-truncated": 3580,
"padded": 3580,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "59753c2144ea93af",
"hash_cont_tokens": "29771089bd3c65c6"
},
"truncated": 0,
"non-truncated": 1224,
"padded": 1224,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "bd8d3dbed15a8c34",
"hash_cont_tokens": "9f6ff69d23a48783"
},
"truncated": 0,
"non-truncated": 1244,
"padded": 1244,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "3573cd87facbb7c5",
"hash_cont_tokens": "a789a13af22308bf"
},
"truncated": 0,
"non-truncated": 1296,
"padded": 1296,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "17e721bc1a7cbb47",
"hash_cont_tokens": "5129a9cfb30c5239"
},
"truncated": 0,
"non-truncated": 1128,
"padded": 1128,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "c9f7583fff66d361",
"hash_cont_tokens": "2e590029ef41fbcd"
},
"truncated": 0,
"non-truncated": 6136,
"padded": 6136,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "40a933f829116f8d",
"hash_cont_tokens": "cd82e108370cece8"
},
"truncated": 0,
"non-truncated": 1088,
"padded": 1088,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "0dfb73a8eb3f692c",
"hash_cont_tokens": "61ef0c8a87f9c92d"
},
"truncated": 0,
"non-truncated": 2448,
"padded": 2448,
"non-padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
|
1251 |
-
},
|
1252 |
-
"harness|hendrycksTest-public_relations|5": {
|
1253 |
-
"hashes": {
|
1254 |
-
"hash_examples": "0d25072e1761652a",
|
1255 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
1256 |
-
"hash_input_tokens": "1710c6ba4c9f3cbd",
|
1257 |
-
"hash_cont_tokens": "568f585a259965c1"
|
1258 |
-
},
|
1259 |
-
"truncated": 0,
|
1260 |
-
"non-truncated": 440,
|
1261 |
-
"padded": 440,
|
1262 |
-
"non-padded": 0,
|
1263 |
-
"effective_few_shots": 5.0,
|
1264 |
-
"num_truncated_few_shots": 0
|
1265 |
-
},
|
1266 |
-
"harness|hendrycksTest-security_studies|5": {
|
1267 |
-
"hashes": {
|
1268 |
-
"hash_examples": "62bb8197e63d60d4",
|
1269 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
1270 |
-
"hash_input_tokens": "32a03f1f22a6e103",
|
1271 |
-
"hash_cont_tokens": "d70cfe096d4fb7bd"
|
1272 |
-
},
|
1273 |
-
"truncated": 0,
|
1274 |
-
"non-truncated": 980,
|
1275 |
-
"padded": 980,
|
1276 |
-
"non-padded": 0,
|
1277 |
-
"effective_few_shots": 5.0,
|
1278 |
-
"num_truncated_few_shots": 0
|
1279 |
-
},
|
1280 |
-
"harness|hendrycksTest-sociology|5": {
|
1281 |
-
"hashes": {
|
1282 |
-
"hash_examples": "e7959df87dea8672",
|
1283 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
1284 |
-
"hash_input_tokens": "828999f7624cbe7e",
|
1285 |
-
"hash_cont_tokens": "c3a3bdfd177eed5b"
|
1286 |
-
},
|
1287 |
-
"truncated": 0,
|
1288 |
-
"non-truncated": 804,
|
1289 |
-
"padded": 804,
|
1290 |
-
"non-padded": 0,
|
1291 |
-
"effective_few_shots": 5.0,
|
1292 |
-
"num_truncated_few_shots": 0
|
1293 |
-
},
|
1294 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
1295 |
-
"hashes": {
|
1296 |
-
"hash_examples": "4a56a01ddca44dca",
|
1297 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
1298 |
-
"hash_input_tokens": "42054621e718dbee",
|
1299 |
-
"hash_cont_tokens": "2568d0e8e36fa959"
|
1300 |
-
},
|
1301 |
-
"truncated": 0,
|
1302 |
-
"non-truncated": 400,
|
1303 |
-
"padded": 400,
|
1304 |
-
"non-padded": 0,
|
1305 |
-
"effective_few_shots": 5.0,
|
1306 |
-
"num_truncated_few_shots": 0
|
1307 |
-
},
|
1308 |
-
"harness|hendrycksTest-virology|5": {
|
1309 |
-
"hashes": {
|
1310 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
1311 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
1312 |
-
"hash_input_tokens": "6c4f0aa4dc859c04",
|
1313 |
-
"hash_cont_tokens": "c178cccd753d9bc5"
|
1314 |
-
},
|
1315 |
-
"truncated": 0,
|
1316 |
-
"non-truncated": 664,
|
1317 |
-
"padded": 664,
|
1318 |
-
"non-padded": 0,
|
1319 |
-
"effective_few_shots": 5.0,
|
1320 |
-
"num_truncated_few_shots": 0
|
1321 |
-
},
|
1322 |
-
"harness|hendrycksTest-world_religions|5": {
|
1323 |
-
"hashes": {
|
1324 |
-
"hash_examples": "3b29cfaf1a81c379",
|
1325 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
1326 |
-
"hash_input_tokens": "6c75d44e092ff24f",
|
1327 |
-
"hash_cont_tokens": "0a3a3ea5ef49d19c"
|
1328 |
-
},
|
1329 |
-
"truncated": 0,
|
1330 |
-
"non-truncated": 684,
|
1331 |
-
"padded": 684,
|
1332 |
-
"non-padded": 0,
|
1333 |
-
"effective_few_shots": 5.0,
|
1334 |
-
"num_truncated_few_shots": 0
|
1335 |
-
},
|
1336 |
-
"harness|truthfulqa:mc|0": {
|
1337 |
-
"hashes": {
|
1338 |
-
"hash_examples": "23176c0531c7b867",
|
1339 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
1340 |
-
"hash_input_tokens": "2738d7ed7075faa7",
|
1341 |
-
"hash_cont_tokens": "6d1691881e252df0"
|
1342 |
-
},
|
1343 |
-
"truncated": 0,
|
1344 |
-
"non-truncated": 9996,
|
1345 |
-
"padded": 9996,
|
1346 |
-
"non-padded": 0,
|
1347 |
-
"effective_few_shots": 0.0,
|
1348 |
-
"num_truncated_few_shots": 0
|
1349 |
-
}
|
1350 |
-
},
|
1351 |
-
"summary_general": {
|
1352 |
-
"hashes": {
|
1353 |
-
"hash_examples": "d84d18e9a963753d",
|
1354 |
-
"hash_full_prompts": "12b540783521a8e6",
|
1355 |
-
"hash_input_tokens": "5c73a7dce6ccf737",
|
1356 |
-
"hash_cont_tokens": "f4b7b7f3a2788768"
|
1357 |
-
},
|
1358 |
-
"total_evaluation_time_secondes": "43618.63698196411",
|
1359 |
-
"truncated": 0,
|
1360 |
-
"non-truncated": 111019,
|
1361 |
-
"padded": 110926,
|
1362 |
-
"non-padded": 93,
|
1363 |
-
"num_truncated_few_shots": 0
|
1364 |
-
}
|
1365 |
-
}
|
upstage/SOLAR-0-70b-16bit/results_2023-10-04T17-16-57.736703.json
DELETED
@@ -1,1367 +0,0 @@
{
    "config_general": {
        "model_name": "upstage/SOLAR-0-70b-16bit",
        "model_sha": "43ff16100b9aec3c4d0c56116796149c1c455efc",
        "model_size": "128.64 GB",
        "model_dtype": "torch.float16",
        "lighteval_sha": "0f318ecf002208468154899217b3ba7c6ae09374",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": ""
    },
    "results": {
        "harness|arc:challenge|25": {
            "acc": 0.6732081911262798,
            "acc_stderr": 0.013706665975587333,
            "acc_norm": 0.7107508532423208,
            "acc_norm_stderr": 0.013250012579393441
        },
        "harness|hellaswag|10": {
            "acc": 0.6974706233817964,
            "acc_stderr": 0.00458414401465495,
            "acc_norm": 0.8789085839474209,
            "acc_norm_stderr": 0.0032556675321152857
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|hendrycksTest-anatomy|5": {
            "acc": 0.6518518518518519,
            "acc_stderr": 0.041153246103369526,
            "acc_norm": 0.6518518518518519,
            "acc_norm_stderr": 0.041153246103369526
        },
        "harness|hendrycksTest-astronomy|5": {
            "acc": 0.8421052631578947,
            "acc_stderr": 0.029674167520101453,
            "acc_norm": 0.8421052631578947,
            "acc_norm_stderr": 0.029674167520101453
        },
        "harness|hendrycksTest-business_ethics|5": {
            "acc": 0.74,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "acc": 0.7320754716981132,
            "acc_stderr": 0.027257260322494845,
            "acc_norm": 0.7320754716981132,
            "acc_norm_stderr": 0.027257260322494845
        },
        "harness|hendrycksTest-college_biology|5": {
            "acc": 0.8402777777777778,
            "acc_stderr": 0.030635578972093274,
            "acc_norm": 0.8402777777777778,
            "acc_norm_stderr": 0.030635578972093274
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "acc": 0.6,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|hendrycksTest-college_medicine|5": {
            "acc": 0.6589595375722543,
            "acc_stderr": 0.036146654241808254,
            "acc_norm": 0.6589595375722543,
            "acc_norm_stderr": 0.036146654241808254
        },
        "harness|hendrycksTest-college_physics|5": {
            "acc": 0.4215686274509804,
            "acc_stderr": 0.04913595201274498,
            "acc_norm": 0.4215686274509804,
            "acc_norm_stderr": 0.04913595201274498
        },
        "harness|hendrycksTest-computer_security|5": {
            "acc": 0.77,
            "acc_stderr": 0.04229525846816507,
            "acc_norm": 0.77,
            "acc_norm_stderr": 0.04229525846816507
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "acc": 0.7063829787234043,
            "acc_stderr": 0.029771642712491227,
            "acc_norm": 0.7063829787234043,
            "acc_norm_stderr": 0.029771642712491227
        },
        "harness|hendrycksTest-econometrics|5": {
            "acc": 0.4649122807017544,
            "acc_stderr": 0.04692008381368909,
            "acc_norm": 0.4649122807017544,
            "acc_norm_stderr": 0.04692008381368909
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "acc": 0.6482758620689655,
            "acc_stderr": 0.0397923663749741,
            "acc_norm": 0.6482758620689655,
            "acc_norm_stderr": 0.0397923663749741
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "acc": 0.46825396825396826,
            "acc_stderr": 0.025699352832131792,
            "acc_norm": 0.46825396825396826,
            "acc_norm_stderr": 0.025699352832131792
        },
        "harness|hendrycksTest-formal_logic|5": {
            "acc": 0.46825396825396826,
            "acc_stderr": 0.04463112720677173,
            "acc_norm": 0.46825396825396826,
            "acc_norm_stderr": 0.04463112720677173
        },
        "harness|hendrycksTest-global_facts|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "acc": 0.8096774193548387,
            "acc_stderr": 0.02233170761182307,
            "acc_norm": 0.8096774193548387,
            "acc_norm_stderr": 0.02233170761182307
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "acc": 0.5615763546798029,
            "acc_stderr": 0.03491207857486519,
            "acc_norm": 0.5615763546798029,
            "acc_norm_stderr": 0.03491207857486519
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "acc": 0.79,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.79,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "acc": 0.8424242424242424,
            "acc_stderr": 0.02845038880528436,
            "acc_norm": 0.8424242424242424,
            "acc_norm_stderr": 0.02845038880528436
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "acc": 0.8737373737373737,
            "acc_stderr": 0.023664359402880242,
            "acc_norm": 0.8737373737373737,
            "acc_norm_stderr": 0.023664359402880242
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "acc": 0.9378238341968912,
            "acc_stderr": 0.017426974154240528,
            "acc_norm": 0.9378238341968912,
            "acc_norm_stderr": 0.017426974154240528
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "acc": 0.7102564102564103,
            "acc_stderr": 0.023000628243687968,
            "acc_norm": 0.7102564102564103,
            "acc_norm_stderr": 0.023000628243687968
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.028406533090608463,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.028406533090608463
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "acc": 0.7647058823529411,
            "acc_stderr": 0.02755361446786381,
            "acc_norm": 0.7647058823529411,
            "acc_norm_stderr": 0.02755361446786381
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "acc": 0.47019867549668876,
            "acc_stderr": 0.04075224992216979,
            "acc_norm": 0.47019867549668876,
            "acc_norm_stderr": 0.04075224992216979
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "acc": 0.9027522935779817,
            "acc_stderr": 0.012703533408540366,
            "acc_norm": 0.9027522935779817,
            "acc_norm_stderr": 0.012703533408540366
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "acc": 0.6018518518518519,
            "acc_stderr": 0.033384734032074016,
            "acc_norm": 0.6018518518518519,
            "acc_norm_stderr": 0.033384734032074016
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "acc": 0.9264705882352942,
            "acc_stderr": 0.01831885585008968,
            "acc_norm": 0.9264705882352942,
            "acc_norm_stderr": 0.01831885585008968
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "acc": 0.8945147679324894,
            "acc_stderr": 0.01999556072375854,
            "acc_norm": 0.8945147679324894,
            "acc_norm_stderr": 0.01999556072375854
        },
        "harness|hendrycksTest-human_aging|5": {
            "acc": 0.7937219730941704,
            "acc_stderr": 0.02715715047956382,
            "acc_norm": 0.7937219730941704,
            "acc_norm_stderr": 0.02715715047956382
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "acc": 0.8625954198473282,
            "acc_stderr": 0.030194823996804475,
            "acc_norm": 0.8625954198473282,
            "acc_norm_stderr": 0.030194823996804475
        },
        "harness|hendrycksTest-international_law|5": {
            "acc": 0.859504132231405,
            "acc_stderr": 0.03172233426002157,
            "acc_norm": 0.859504132231405,
            "acc_norm_stderr": 0.03172233426002157
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "acc": 0.8240740740740741,
            "acc_stderr": 0.036809181416738807,
            "acc_norm": 0.8240740740740741,
            "acc_norm_stderr": 0.036809181416738807
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "acc": 0.803680981595092,
            "acc_stderr": 0.031207970394709218,
            "acc_norm": 0.803680981595092,
            "acc_norm_stderr": 0.031207970394709218
        },
        "harness|hendrycksTest-machine_learning|5": {
            "acc": 0.5089285714285714,
            "acc_stderr": 0.04745033255489122,
            "acc_norm": 0.5089285714285714,
            "acc_norm_stderr": 0.04745033255489122
        },
        "harness|hendrycksTest-management|5": {
            "acc": 0.8252427184466019,
            "acc_stderr": 0.0376017800602662,
            "acc_norm": 0.8252427184466019,
            "acc_norm_stderr": 0.0376017800602662
        },
        "harness|hendrycksTest-marketing|5": {
            "acc": 0.9017094017094017,
            "acc_stderr": 0.019503444900757567,
            "acc_norm": 0.9017094017094017,
            "acc_norm_stderr": 0.019503444900757567
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "acc": 0.71,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "acc": 0.8684546615581098,
            "acc_stderr": 0.01208670521425043,
            "acc_norm": 0.8684546615581098,
            "acc_norm_stderr": 0.01208670521425043
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "acc": 0.7803468208092486,
            "acc_stderr": 0.022289638852617893,
            "acc_norm": 0.7803468208092486,
            "acc_norm_stderr": 0.022289638852617893
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "acc": 0.6044692737430167,
            "acc_stderr": 0.01635341541007577,
            "acc_norm": 0.6044692737430167,
            "acc_norm_stderr": 0.01635341541007577
        },
        "harness|hendrycksTest-nutrition|5": {
            "acc": 0.7679738562091504,
            "acc_stderr": 0.024170840879340873,
            "acc_norm": 0.7679738562091504,
            "acc_norm_stderr": 0.024170840879340873
        },
        "harness|hendrycksTest-philosophy|5": {
            "acc": 0.7781350482315113,
            "acc_stderr": 0.02359885829286305,
            "acc_norm": 0.7781350482315113,
            "acc_norm_stderr": 0.02359885829286305
        },
        "harness|hendrycksTest-prehistory|5": {
            "acc": 0.8333333333333334,
            "acc_stderr": 0.020736358408060006,
            "acc_norm": 0.8333333333333334,
            "acc_norm_stderr": 0.020736358408060006
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "acc": 0.574468085106383,
            "acc_stderr": 0.029494827600144366,
            "acc_norm": 0.574468085106383,
            "acc_norm_stderr": 0.029494827600144366
        },
        "harness|hendrycksTest-professional_law|5": {
            "acc": 0.5521512385919165,
            "acc_stderr": 0.012700582404768235,
            "acc_norm": 0.5521512385919165,
            "acc_norm_stderr": 0.012700582404768235
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "acc": 0.7389705882352942,
            "acc_stderr": 0.02667925227010314,
            "acc_norm": 0.7389705882352942,
            "acc_norm_stderr": 0.02667925227010314
        },
        "harness|hendrycksTest-professional_psychology|5": {
            "acc": 0.7647058823529411,
            "acc_stderr": 0.01716058723504635,
            "acc_norm": 0.7647058823529411,
            "acc_norm_stderr": 0.01716058723504635
        },
        "harness|hendrycksTest-public_relations|5": {
            "acc": 0.7454545454545455,
            "acc_stderr": 0.041723430387053825,
            "acc_norm": 0.7454545454545455,
            "acc_norm_stderr": 0.041723430387053825
        },
        "harness|hendrycksTest-security_studies|5": {
            "acc": 0.8204081632653061,
            "acc_stderr": 0.024573293589585637,
            "acc_norm": 0.8204081632653061,
            "acc_norm_stderr": 0.024573293589585637
        },
        "harness|hendrycksTest-sociology|5": {
            "acc": 0.8756218905472637,
            "acc_stderr": 0.023335401790166327,
            "acc_norm": 0.8756218905472637,
            "acc_norm_stderr": 0.023335401790166327
        },
        "harness|hendrycksTest-us_foreign_policy|5": {
            "acc": 0.9,
            "acc_stderr": 0.030151134457776334,
            "acc_norm": 0.9,
            "acc_norm_stderr": 0.030151134457776334
        },
        "harness|hendrycksTest-virology|5": {
            "acc": 0.5301204819277109,
            "acc_stderr": 0.03885425420866767,
            "acc_norm": 0.5301204819277109,
            "acc_norm_stderr": 0.03885425420866767
        },
        "harness|hendrycksTest-world_religions|5": {
            "acc": 0.8771929824561403,
            "acc_stderr": 0.02517298435015575,
            "acc_norm": 0.8771929824561403,
            "acc_norm_stderr": 0.02517298435015575
        },
        "harness|truthfulqa:mc|0": {
            "mc1": 0.44430844553243576,
            "mc1_stderr": 0.017394586250743173,
            "mc2": 0.6224972679005382,
            "mc2_stderr": 0.014880875055625352
        },
        "all": {
            "acc": 0.7050740464217434,
            "acc_stderr": 0.03085018588043536,
            "acc_norm": 0.7087855823993987,
            "acc_norm_stderr": 0.03081992944181276,
            "mc1": 0.44430844553243576,
            "mc1_stderr": 0.017394586250743173,
            "mc2": 0.6224972679005382,
            "mc2_stderr": 0.014880875055625352
        }
    },
    "versions": {
        "harness|arc:challenge|25": 0,
        "harness|hellaswag|10": 0,
        "harness|hendrycksTest-abstract_algebra|5": 1,
        "harness|hendrycksTest-anatomy|5": 1,
        "harness|hendrycksTest-astronomy|5": 1,
        "harness|hendrycksTest-business_ethics|5": 1,
        "harness|hendrycksTest-clinical_knowledge|5": 1,
        "harness|hendrycksTest-college_biology|5": 1,
        "harness|hendrycksTest-college_chemistry|5": 1,
        "harness|hendrycksTest-college_computer_science|5": 1,
        "harness|hendrycksTest-college_mathematics|5": 1,
        "harness|hendrycksTest-college_medicine|5": 1,
        "harness|hendrycksTest-college_physics|5": 1,
        "harness|hendrycksTest-computer_security|5": 1,
        "harness|hendrycksTest-conceptual_physics|5": 1,
        "harness|hendrycksTest-econometrics|5": 1,
        "harness|hendrycksTest-electrical_engineering|5": 1,
        "harness|hendrycksTest-elementary_mathematics|5": 1,
        "harness|hendrycksTest-formal_logic|5": 1,
        "harness|hendrycksTest-global_facts|5": 1,
        "harness|hendrycksTest-high_school_biology|5": 1,
        "harness|hendrycksTest-high_school_chemistry|5": 1,
        "harness|hendrycksTest-high_school_computer_science|5": 1,
        "harness|hendrycksTest-high_school_european_history|5": 1,
        "harness|hendrycksTest-high_school_geography|5": 1,
        "harness|hendrycksTest-high_school_government_and_politics|5": 1,
        "harness|hendrycksTest-high_school_macroeconomics|5": 1,
        "harness|hendrycksTest-high_school_mathematics|5": 1,
        "harness|hendrycksTest-high_school_microeconomics|5": 1,
        "harness|hendrycksTest-high_school_physics|5": 1,
        "harness|hendrycksTest-high_school_psychology|5": 1,
        "harness|hendrycksTest-high_school_statistics|5": 1,
        "harness|hendrycksTest-high_school_us_history|5": 1,
        "harness|hendrycksTest-high_school_world_history|5": 1,
        "harness|hendrycksTest-human_aging|5": 1,
        "harness|hendrycksTest-human_sexuality|5": 1,
        "harness|hendrycksTest-international_law|5": 1,
        "harness|hendrycksTest-jurisprudence|5": 1,
        "harness|hendrycksTest-logical_fallacies|5": 1,
        "harness|hendrycksTest-machine_learning|5": 1,
        "harness|hendrycksTest-management|5": 1,
        "harness|hendrycksTest-marketing|5": 1,
        "harness|hendrycksTest-medical_genetics|5": 1,
        "harness|hendrycksTest-miscellaneous|5": 1,
        "harness|hendrycksTest-moral_disputes|5": 1,
        "harness|hendrycksTest-moral_scenarios|5": 1,
        "harness|hendrycksTest-nutrition|5": 1,
        "harness|hendrycksTest-philosophy|5": 1,
        "harness|hendrycksTest-prehistory|5": 1,
        "harness|hendrycksTest-professional_accounting|5": 1,
        "harness|hendrycksTest-professional_law|5": 1,
        "harness|hendrycksTest-professional_medicine|5": 1,
        "harness|hendrycksTest-professional_psychology|5": 1,
        "harness|hendrycksTest-public_relations|5": 1,
        "harness|hendrycksTest-security_studies|5": 1,
        "harness|hendrycksTest-sociology|5": 1,
        "harness|hendrycksTest-us_foreign_policy|5": 1,
        "harness|hendrycksTest-virology|5": 1,
        "harness|hendrycksTest-world_religions|5": 1,
        "harness|truthfulqa:mc|0": 1,
        "all": 0
    },
    "config_tasks": {
        "harness|arc:challenge": "LM Harness task",
        "harness|hellaswag": "LM Harness task",
        "harness|hendrycksTest-abstract_algebra": "LM Harness task",
        "harness|hendrycksTest-anatomy": "LM Harness task",
        "harness|hendrycksTest-astronomy": "LM Harness task",
        "harness|hendrycksTest-business_ethics": "LM Harness task",
        "harness|hendrycksTest-clinical_knowledge": "LM Harness task",
        "harness|hendrycksTest-college_biology": "LM Harness task",
        "harness|hendrycksTest-college_chemistry": "LM Harness task",
        "harness|hendrycksTest-college_computer_science": "LM Harness task",
        "harness|hendrycksTest-college_mathematics": "LM Harness task",
        "harness|hendrycksTest-college_medicine": "LM Harness task",
        "harness|hendrycksTest-college_physics": "LM Harness task",
        "harness|hendrycksTest-computer_security": "LM Harness task",
        "harness|hendrycksTest-conceptual_physics": "LM Harness task",
        "harness|hendrycksTest-econometrics": "LM Harness task",
        "harness|hendrycksTest-electrical_engineering": "LM Harness task",
        "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
        "harness|hendrycksTest-formal_logic": "LM Harness task",
        "harness|hendrycksTest-global_facts": "LM Harness task",
        "harness|hendrycksTest-high_school_biology": "LM Harness task",
        "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
        "harness|hendrycksTest-high_school_computer_science": "LM Harness task",
        "harness|hendrycksTest-high_school_european_history": "LM Harness task",
        "harness|hendrycksTest-high_school_geography": "LM Harness task",
        "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
        "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
        "harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_physics": "LM Harness task",
        "harness|hendrycksTest-high_school_psychology": "LM Harness task",
        "harness|hendrycksTest-high_school_statistics": "LM Harness task",
        "harness|hendrycksTest-high_school_us_history": "LM Harness task",
        "harness|hendrycksTest-high_school_world_history": "LM Harness task",
        "harness|hendrycksTest-human_aging": "LM Harness task",
        "harness|hendrycksTest-human_sexuality": "LM Harness task",
        "harness|hendrycksTest-international_law": "LM Harness task",
        "harness|hendrycksTest-jurisprudence": "LM Harness task",
        "harness|hendrycksTest-logical_fallacies": "LM Harness task",
        "harness|hendrycksTest-machine_learning": "LM Harness task",
        "harness|hendrycksTest-management": "LM Harness task",
        "harness|hendrycksTest-marketing": "LM Harness task",
        "harness|hendrycksTest-medical_genetics": "LM Harness task",
        "harness|hendrycksTest-miscellaneous": "LM Harness task",
        "harness|hendrycksTest-moral_disputes": "LM Harness task",
        "harness|hendrycksTest-moral_scenarios": "LM Harness task",
        "harness|hendrycksTest-nutrition": "LM Harness task",
        "harness|hendrycksTest-philosophy": "LM Harness task",
        "harness|hendrycksTest-prehistory": "LM Harness task",
        "harness|hendrycksTest-professional_accounting": "LM Harness task",
        "harness|hendrycksTest-professional_law": "LM Harness task",
        "harness|hendrycksTest-professional_medicine": "LM Harness task",
        "harness|hendrycksTest-professional_psychology": "LM Harness task",
        "harness|hendrycksTest-public_relations": "LM Harness task",
        "harness|hendrycksTest-security_studies": "LM Harness task",
        "harness|hendrycksTest-sociology": "LM Harness task",
        "harness|hendrycksTest-us_foreign_policy": "LM Harness task",
        "harness|hendrycksTest-virology": "LM Harness task",
        "harness|hendrycksTest-world_religions": "LM Harness task",
        "harness|truthfulqa:mc": "LM Harness task"
    },
    "summary_tasks": {
        "harness|arc:challenge|25": {
            "hashes": {
                "hash_examples": "17b0cae357c0259e",
                "hash_full_prompts": "045cbb916e5145c6",
                "hash_input_tokens": "3722289b79076c44",
                "hash_cont_tokens": "e8abf848493b50f7"
            },
            "truncated": 0,
            "non-truncated": 4687,
            "padded": 4687,
            "non-padded": 0,
            "effective_few_shots": 25.0,
            "num_truncated_few_shots": 0
        },
        "harness|hellaswag|10": {
            "hashes": {
                "hash_examples": "e1768ecb99d7ecf0",
                "hash_full_prompts": "0b4c16983130f84f",
                "hash_input_tokens": "ececd684171f1ef2",
                "hash_cont_tokens": "9fe0a5c42e1532db"
            },
            "truncated": 0,
            "non-truncated": 40168,
            "padded": 40113,
            "non-padded": 55,
            "effective_few_shots": 10.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "hashes": {
                "hash_examples": "280f9f325b40559a",
                "hash_full_prompts": "2f776a367d23aea2",
                "hash_input_tokens": "c54ff61ad0273dd7",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-anatomy|5": {
            "hashes": {
                "hash_examples": "2f83a4f1cab4ba18",
                "hash_full_prompts": "516f74bef25df620",
                "hash_input_tokens": "be31a1e22aef5f90",
                "hash_cont_tokens": "f11971a765cb609f"
            },
            "truncated": 0,
            "non-truncated": 540,
            "padded": 540,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-astronomy|5": {
            "hashes": {
                "hash_examples": "7d587b908da4d762",
                "hash_full_prompts": "faf4e80f65de93ca",
                "hash_input_tokens": "277a7b1fad566940",
                "hash_cont_tokens": "440a970fadecdc7b"
            },
            "truncated": 0,
            "non-truncated": 608,
            "padded": 608,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-business_ethics|5": {
            "hashes": {
                "hash_examples": "33e51740670de686",
                "hash_full_prompts": "db01c3ef8e1479d4",
                "hash_input_tokens": "ba552605bc116de5",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "hashes": {
                "hash_examples": "f3366dbe7eefffa4",
                "hash_full_prompts": "49654f71d94b65c3",
                "hash_input_tokens": "428c7563d0b98ab9",
                "hash_cont_tokens": "7ecd60c25b9bfe5b"
            },
            "truncated": 0,
            "non-truncated": 1060,
            "padded": 1060,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_biology|5": {
            "hashes": {
                "hash_examples": "ca2b6753a0193e7f",
                "hash_full_prompts": "2b460b75f1fdfefd",
                "hash_input_tokens": "da036601573942e2",
                "hash_cont_tokens": "875cde3af7a0ee14"
            },
            "truncated": 0,
            "non-truncated": 576,
            "padded": 576,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "hashes": {
                "hash_examples": "22ff85f1d34f42d1",
                "hash_full_prompts": "242c9be6da583e95",
                "hash_input_tokens": "94e0196d6aded13d",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "hashes": {
                "hash_examples": "30318289d717a5cf",
                "hash_full_prompts": "ed2bdb4e87c4b371",
                "hash_input_tokens": "6e4d0f4a8d36690b",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "hashes": {
                "hash_examples": "4944d1f0b6b5d911",
                "hash_full_prompts": "770bc4281c973190",
                "hash_input_tokens": "614054d17109a25d",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_medicine|5": {
            "hashes": {
                "hash_examples": "dd69cc33381275af",
                "hash_full_prompts": "ad2a53e5250ab46e",
                "hash_input_tokens": "081bb2b524defd1c",
                "hash_cont_tokens": "702fb6d82ff0d6ac"
            },
            "truncated": 0,
            "non-truncated": 692,
            "padded": 692,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_physics|5": {
            "hashes": {
                "hash_examples": "875dd26d22655b0d",
                "hash_full_prompts": "833a0d7b55aed500",
                "hash_input_tokens": "5421d9a1af86cbd4",
                "hash_cont_tokens": "f7b8097afc16a47c"
            },
            "truncated": 0,
            "non-truncated": 408,
            "padded": 408,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-computer_security|5": {
            "hashes": {
                "hash_examples": "006451eedc0ededb",
                "hash_full_prompts": "94034c97e85d8f46",
                "hash_input_tokens": "5e6b70ecb333cf18",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "hashes": {
                "hash_examples": "8874ece872d2ca4c",
                "hash_full_prompts": "e40d15a34640d6fa",
                "hash_input_tokens": "c2ef11a87264ceed",
                "hash_cont_tokens": "aa0e8bc655f2f641"
            },
            "truncated": 0,
            "non-truncated": 940,
            "padded": 940,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-econometrics|5": {
            "hashes": {
                "hash_examples": "64d3623b0bfaa43f",
                "hash_full_prompts": "612f340fae41338d",
                "hash_input_tokens": "ecaccd912a4c3978",
                "hash_cont_tokens": "b1cc6e7e9fcd3827"
            },
            "truncated": 0,
            "non-truncated": 456,
            "padded": 456,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "hashes": {
                "hash_examples": "e98f51780c674d7e",
                "hash_full_prompts": "10275b312d812ae6",
                "hash_input_tokens": "1590c84291399be8",
                "hash_cont_tokens": "2425a3f084a591ef"
            },
            "truncated": 0,
            "non-truncated": 580,
            "padded": 580,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "hashes": {
                "hash_examples": "fc48208a5ac1c0ce",
                "hash_full_prompts": "5ec274c6c82aca23",
                "hash_input_tokens": "3269597f715b0da1",
                "hash_cont_tokens": "bd87bf0c060fd925"
            },
            "truncated": 0,
            "non-truncated": 1512,
            "padded": 1512,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-formal_logic|5": {
            "hashes": {
                "hash_examples": "5a6525665f63ea72",
                "hash_full_prompts": "07b92638c4a6b500",
                "hash_input_tokens": "a2800d20f3ab8d7c",
                "hash_cont_tokens": "eb8932890e0605db"
            },
            "truncated": 0,
            "non-truncated": 504,
            "padded": 504,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-global_facts|5": {
            "hashes": {
                "hash_examples": "371d70d743b2b89b",
                "hash_full_prompts": "332fdee50a1921b4",
                "hash_input_tokens": "94ed44b3772505ad",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "hashes": {
                "hash_examples": "a79e1018b1674052",
                "hash_full_prompts": "e624e26ede922561",
                "hash_input_tokens": "24423acb928db768",
                "hash_cont_tokens": "1ddcb86d28cde266"
            },
            "truncated": 0,
            "non-truncated": 1240,
            "padded": 1240,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "hashes": {
                "hash_examples": "44bfc25c389f0e03",
                "hash_full_prompts": "0e3e5f5d9246482a",
                "hash_input_tokens": "831ff35c474e5cef",
                "hash_cont_tokens": "176c8dcff38c5f8f"
            },
            "truncated": 0,
            "non-truncated": 812,
            "padded": 812,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "hashes": {
                "hash_examples": "8b8cdb1084f24169",
                "hash_full_prompts": "c00487e67c1813cc",
                "hash_input_tokens": "a20a96b44dcc5b30",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "hashes": {
                "hash_examples": "11cd32d0ef440171",
                "hash_full_prompts": "318f4513c537c6bf",
                "hash_input_tokens": "5002f4ac8b1562ca",
                "hash_cont_tokens": "674fc454bdc5ac93"
            },
            "truncated": 0,
            "non-truncated": 660,
            "padded": 656,
            "non-padded": 4,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "hashes": {
                "hash_examples": "b60019b9e80b642f",
                "hash_full_prompts": "ee5789fcc1a81b1e",
                "hash_input_tokens": "7c5547c7da5bc793",
                "hash_cont_tokens": "03a5012b916274ea"
            },
            "truncated": 0,
            "non-truncated": 792,
            "padded": 792,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "hashes": {
                "hash_examples": "d221ec983d143dc3",
                "hash_full_prompts": "ac42d888e1ce1155",
                "hash_input_tokens": "f62991cb6a496b05",
                "hash_cont_tokens": "873d2aab226ba1d8"
            },
            "truncated": 0,
            "non-truncated": 772,
            "padded": 772,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "hashes": {
                "hash_examples": "59c2915cacfd3fbb",
                "hash_full_prompts": "c6bd9d25158abd0e",
                "hash_input_tokens": "4cef2aff6e3d59ed",
                "hash_cont_tokens": "c583432ad27fcfe0"
            },
            "truncated": 0,
            "non-truncated": 1560,
            "padded": 1560,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "hashes": {
                "hash_examples": "1f8ac897608de342",
                "hash_full_prompts": "5d88f41fc2d643a8",
                "hash_input_tokens": "6e2577ea4082ed2b",
                "hash_cont_tokens": "d7907b61bcb8c123"
            },
            "truncated": 0,
            "non-truncated": 1080,
            "padded": 1080,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "hashes": {
                "hash_examples": "ead6a0f2f6c83370",
                "hash_full_prompts": "bfc393381298609e",
                "hash_input_tokens": "c5fc9aeb1079c8e4",
                "hash_cont_tokens": "f47f041de50333b9"
            },
            "truncated": 0,
            "non-truncated": 952,
            "padded": 952,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "hashes": {
                "hash_examples": "c3f2025990afec64",
                "hash_full_prompts": "fc78b4997e436734",
                "hash_input_tokens": "555fc385cffa84ca",
                "hash_cont_tokens": "0d56317b3e5eedb5"
            },
            "truncated": 0,
            "non-truncated": 604,
            "padded": 604,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "hashes": {
                "hash_examples": "21f8aab618f6d636",
                "hash_full_prompts": "d5c76aa40b9dbc43",
                "hash_input_tokens": "febd23cbf9973b7f",
                "hash_cont_tokens": "09ba1243e7390c0f"
            },
            "truncated": 0,
            "non-truncated": 2180,
            "padded": 2180,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "hashes": {
                "hash_examples": "2386a60a11fc5de3",
                "hash_full_prompts": "4c5c8be5aafac432",
                "hash_input_tokens": "400e55b56ee6fbd7",
                "hash_cont_tokens": "9cc29889c3d3f77d"
            },
            "truncated": 0,
            "non-truncated": 864,
            "padded": 864,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "hashes": {
                "hash_examples": "74961543be40f04f",
                "hash_full_prompts": "5d5ca4840131ba21",
                "hash_input_tokens": "c639cce12a46ebad",
                "hash_cont_tokens": "cdd0b3dc06d933e5"
            },
            "truncated": 0,
            "non-truncated": 816,
            "padded": 816,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "hashes": {
                "hash_examples": "2ad2f6b7198b2234",
                "hash_full_prompts": "11845057459afd72",
                "hash_input_tokens": "b9762065cce6f3a6",
                "hash_cont_tokens": "e02816433ff28daf"
            },
            "truncated": 0,
            "non-truncated": 948,
            "padded": 948,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-human_aging|5": {
            "hashes": {
                "hash_examples": "1a7199dc733e779b",
                "hash_full_prompts": "756b9096b8eaf892",
                "hash_input_tokens": "541a75f071dcf579",
                "hash_cont_tokens": "142a4a8a1138a214"
            },
            "truncated": 0,
            "non-truncated": 892,
            "padded": 892,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "hashes": {
                "hash_examples": "7acb8fdad97f88a6",
                "hash_full_prompts": "731a52ff15b8cfdb",
                "hash_input_tokens": "04269e5c5a257dd9",
                "hash_cont_tokens": "bc54813e809b796d"
            },
            "truncated": 0,
            "non-truncated": 524,
            "padded": 524,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-international_law|5": {
            "hashes": {
                "hash_examples": "1300bfd0dfc59114",
                "hash_full_prompts": "db2aefbff5eec996",
                "hash_input_tokens": "d93ba9d9d38e4397",
                "hash_cont_tokens": "8ea8c5ff76a15bca"
            },
            "truncated": 0,
            "non-truncated": 484,
            "padded": 484,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "hashes": {
                "hash_examples": "083b1e4904c48dc2",
                "hash_full_prompts": "0f89ee3fe03d6a21",
                "hash_input_tokens": "9eeaccd2698b4f5a",
                "hash_cont_tokens": "e3a8cd951b6e3469"
            },
            "truncated": 0,
            "non-truncated": 432,
            "padded": 432,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "hashes": {
                "hash_examples": "709128f9926a634c",
                "hash_full_prompts": "98a04b1f8f841069",
                "hash_input_tokens": "b4f08f544f2b7576",
                "hash_cont_tokens": "3e9e0bdc248fd88a"
            },
            "truncated": 0,
            "non-truncated": 652,
            "padded": 648,
            "non-padded": 4,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-machine_learning|5": {
            "hashes": {
                "hash_examples": "88f22a636029ae47",
                "hash_full_prompts": "2e1c8d4b1e0cc921",
                "hash_input_tokens": "900c2a51f1174b9f",
                "hash_cont_tokens": "55b12fb138c6a064"
            },
            "truncated": 0,
            "non-truncated": 448,
            "padded": 448,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-management|5": {
            "hashes": {
                "hash_examples": "8c8a1e07a2151dca",
                "hash_full_prompts": "f51611f514b265b0",
                "hash_input_tokens": "6b36efb4689c6eca",
                "hash_cont_tokens": "a01d6d39a83c4597"
            },
            "truncated": 0,
            "non-truncated": 412,
            "padded": 412,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-marketing|5": {
            "hashes": {
                "hash_examples": "2668953431f91e96",
                "hash_full_prompts": "77562bef997c7650",
                "hash_input_tokens": "2aaac78a0cfed47a",
                "hash_cont_tokens": "6aeaed4d823c98aa"
            },
            "truncated": 0,
            "non-truncated": 936,
            "padded": 936,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "hashes": {
                "hash_examples": "9c2dda34a2ea4fd2",
                "hash_full_prompts": "202139046daa118f",
                "hash_input_tokens": "886ca823b41c094a",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "hashes": {
                "hash_examples": "41adb694024809c2",
                "hash_full_prompts": "bffec9fc237bcf93",
                "hash_input_tokens": "72fd71de7675e7d0",
                "hash_cont_tokens": "9b0ab02a64603081"
            },
            "truncated": 0,
            "non-truncated": 3132,
            "padded": 3132,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "hashes": {
                "hash_examples": "3171c13ba3c594c4",
                "hash_full_prompts": "170831fc36f1d59e",
                "hash_input_tokens": "f3ca0dd8e7a1eb09",
                "hash_cont_tokens": "3b8bbe9108e55ce9"
            },
            "truncated": 0,
            "non-truncated": 1384,
            "padded": 1354,
            "non-padded": 30,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "hashes": {
                "hash_examples": "9873e077e83e0546",
                "hash_full_prompts": "08f4ceba3131a068",
                "hash_input_tokens": "3e793631e951f23c",
                "hash_cont_tokens": "3e9bfc0362e97330"
            },
            "truncated": 0,
            "non-truncated": 3580,
            "padded": 3580,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-nutrition|5": {
            "hashes": {
                "hash_examples": "7db1d8142ec14323",
                "hash_full_prompts": "4c0e68e3586cb453",
                "hash_input_tokens": "59753c2144ea93af",
                "hash_cont_tokens": "23b2dc6ee2da4cfc"
            },
            "truncated": 0,
            "non-truncated": 1224,
            "padded": 1224,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-philosophy|5": {
            "hashes": {
                "hash_examples": "9b455b7d72811cc8",
                "hash_full_prompts": "e467f822d8a0d3ff",
                "hash_input_tokens": "bd8d3dbed15a8c34",
                "hash_cont_tokens": "9f6ff69d23a48783"
            },
            "truncated": 0,
            "non-truncated": 1244,
            "padded": 1244,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-prehistory|5": {
            "hashes": {
                "hash_examples": "8be90d0f538f1560",
                "hash_full_prompts": "152187949bcd0921",
                "hash_input_tokens": "3573cd87facbb7c5",
                "hash_cont_tokens": "d6458d743d875837"
            },
            "truncated": 0,
            "non-truncated": 1296,
            "padded": 1296,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "hashes": {
                "hash_examples": "8d377597916cd07e",
|
1201 |
-
"hash_full_prompts": "0eb7345d6144ee0d",
|
1202 |
-
"hash_input_tokens": "17e721bc1a7cbb47",
|
1203 |
-
"hash_cont_tokens": "922a195f53a35662"
|
1204 |
-
},
|
1205 |
-
"truncated": 0,
|
1206 |
-
"non-truncated": 1128,
|
1207 |
-
"padded": 1128,
|
1208 |
-
"non-padded": 0,
|
1209 |
-
"effective_few_shots": 5.0,
|
1210 |
-
"num_truncated_few_shots": 0
|
1211 |
-
},
|
1212 |
-
"harness|hendrycksTest-professional_law|5": {
|
1213 |
-
"hashes": {
|
1214 |
-
"hash_examples": "cd9dbc52b3c932d6",
|
1215 |
-
"hash_full_prompts": "36ac764272bfb182",
|
1216 |
-
"hash_input_tokens": "c9f7583fff66d361",
|
1217 |
-
"hash_cont_tokens": "2e590029ef41fbcd"
|
1218 |
-
},
|
1219 |
-
"truncated": 0,
|
1220 |
-
"non-truncated": 6136,
|
1221 |
-
"padded": 6136,
|
1222 |
-
"non-padded": 0,
|
1223 |
-
"effective_few_shots": 5.0,
|
1224 |
-
"num_truncated_few_shots": 0
|
1225 |
-
},
|
1226 |
-
"harness|hendrycksTest-professional_medicine|5": {
|
1227 |
-
"hashes": {
|
1228 |
-
"hash_examples": "b20e4e816c1e383e",
|
1229 |
-
"hash_full_prompts": "7b8d69ea2acaf2f7",
|
1230 |
-
"hash_input_tokens": "40a933f829116f8d",
|
1231 |
-
"hash_cont_tokens": "7cfee54dbddd5a98"
|
1232 |
-
},
|
1233 |
-
"truncated": 0,
|
1234 |
-
"non-truncated": 1088,
|
1235 |
-
"padded": 1088,
|
1236 |
-
"non-padded": 0,
|
1237 |
-
"effective_few_shots": 5.0,
|
1238 |
-
"num_truncated_few_shots": 0
|
1239 |
-
},
|
1240 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
1241 |
-
"hashes": {
|
1242 |
-
"hash_examples": "d45b73b22f9cc039",
|
1243 |
-
"hash_full_prompts": "fe8937e9ffc99771",
|
1244 |
-
"hash_input_tokens": "0dfb73a8eb3f692c",
|
1245 |
-
"hash_cont_tokens": "a86677b2a45c20e1"
|
1246 |
-
},
|
1247 |
-
"truncated": 0,
|
1248 |
-
"non-truncated": 2448,
|
1249 |
-
"padded": 2448,
|
1250 |
-
"non-padded": 0,
|
1251 |
-
"effective_few_shots": 5.0,
|
1252 |
-
"num_truncated_few_shots": 0
|
1253 |
-
},
|
1254 |
-
"harness|hendrycksTest-public_relations|5": {
|
1255 |
-
"hashes": {
|
1256 |
-
"hash_examples": "0d25072e1761652a",
|
1257 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
1258 |
-
"hash_input_tokens": "1710c6ba4c9f3cbd",
|
1259 |
-
"hash_cont_tokens": "0d756ccaae031757"
|
1260 |
-
},
|
1261 |
-
"truncated": 0,
|
1262 |
-
"non-truncated": 440,
|
1263 |
-
"padded": 440,
|
1264 |
-
"non-padded": 0,
|
1265 |
-
"effective_few_shots": 5.0,
|
1266 |
-
"num_truncated_few_shots": 0
|
1267 |
-
},
|
1268 |
-
"harness|hendrycksTest-security_studies|5": {
|
1269 |
-
"hashes": {
|
1270 |
-
"hash_examples": "62bb8197e63d60d4",
|
1271 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
1272 |
-
"hash_input_tokens": "32a03f1f22a6e103",
|
1273 |
-
"hash_cont_tokens": "b2229bc2cfbf594b"
|
1274 |
-
},
|
1275 |
-
"truncated": 0,
|
1276 |
-
"non-truncated": 980,
|
1277 |
-
"padded": 980,
|
1278 |
-
"non-padded": 0,
|
1279 |
-
"effective_few_shots": 5.0,
|
1280 |
-
"num_truncated_few_shots": 0
|
1281 |
-
},
|
1282 |
-
"harness|hendrycksTest-sociology|5": {
|
1283 |
-
"hashes": {
|
1284 |
-
"hash_examples": "e7959df87dea8672",
|
1285 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
1286 |
-
"hash_input_tokens": "828999f7624cbe7e",
|
1287 |
-
"hash_cont_tokens": "c3a3bdfd177eed5b"
|
1288 |
-
},
|
1289 |
-
"truncated": 0,
|
1290 |
-
"non-truncated": 804,
|
1291 |
-
"padded": 804,
|
1292 |
-
"non-padded": 0,
|
1293 |
-
"effective_few_shots": 5.0,
|
1294 |
-
"num_truncated_few_shots": 0
|
1295 |
-
},
|
1296 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
1297 |
-
"hashes": {
|
1298 |
-
"hash_examples": "4a56a01ddca44dca",
|
1299 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
1300 |
-
"hash_input_tokens": "42054621e718dbee",
|
1301 |
-
"hash_cont_tokens": "50421e30bef398f9"
|
1302 |
-
},
|
1303 |
-
"truncated": 0,
|
1304 |
-
"non-truncated": 400,
|
1305 |
-
"padded": 400,
|
1306 |
-
"non-padded": 0,
|
1307 |
-
"effective_few_shots": 5.0,
|
1308 |
-
"num_truncated_few_shots": 0
|
1309 |
-
},
|
1310 |
-
"harness|hendrycksTest-virology|5": {
|
1311 |
-
"hashes": {
|
1312 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
1313 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
1314 |
-
"hash_input_tokens": "6c4f0aa4dc859c04",
|
1315 |
-
"hash_cont_tokens": "af8b3658088cb37f"
|
1316 |
-
},
|
1317 |
-
"truncated": 0,
|
1318 |
-
"non-truncated": 664,
|
1319 |
-
"padded": 664,
|
1320 |
-
"non-padded": 0,
|
1321 |
-
"effective_few_shots": 5.0,
|
1322 |
-
"num_truncated_few_shots": 0
|
1323 |
-
},
|
1324 |
-
"harness|hendrycksTest-world_religions|5": {
|
1325 |
-
"hashes": {
|
1326 |
-
"hash_examples": "3b29cfaf1a81c379",
|
1327 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
1328 |
-
"hash_input_tokens": "6c75d44e092ff24f",
|
1329 |
-
"hash_cont_tokens": "060118bef6de4e0a"
|
1330 |
-
},
|
1331 |
-
"truncated": 0,
|
1332 |
-
"non-truncated": 684,
|
1333 |
-
"padded": 684,
|
1334 |
-
"non-padded": 0,
|
1335 |
-
"effective_few_shots": 5.0,
|
1336 |
-
"num_truncated_few_shots": 0
|
1337 |
-
},
|
1338 |
-
"harness|truthfulqa:mc|0": {
|
1339 |
-
"hashes": {
|
1340 |
-
"hash_examples": "23176c0531c7b867",
|
1341 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
1342 |
-
"hash_input_tokens": "2738d7ed7075faa7",
|
1343 |
-
"hash_cont_tokens": "f5da56a132aab151"
|
1344 |
-
},
|
1345 |
-
"truncated": 0,
|
1346 |
-
"non-truncated": 9996,
|
1347 |
-
"padded": 9996,
|
1348 |
-
"non-padded": 0,
|
1349 |
-
"effective_few_shots": 0.0,
|
1350 |
-
"num_truncated_few_shots": 0
|
1351 |
-
}
|
1352 |
-
},
|
1353 |
-
"summary_general": {
|
1354 |
-
"hashes": {
|
1355 |
-
"hash_examples": "d84d18e9a963753d",
|
1356 |
-
"hash_full_prompts": "12b540783521a8e6",
|
1357 |
-
"hash_input_tokens": "5c73a7dce6ccf737",
|
1358 |
-
"hash_cont_tokens": "71d56183130fecbd"
|
1359 |
-
},
|
1360 |
-
"total_evaluation_time_secondes": "43905.18137216568",
|
1361 |
-
"truncated": 0,
|
1362 |
-
"non-truncated": 111019,
|
1363 |
-
"padded": 110926,
|
1364 |
-
"non-padded": 93,
|
1365 |
-
"num_truncated_few_shots": 0
|
1366 |
-
}
|
1367 |
-
}
upstage/SOLAR-0-70b-16bit/results_2023-11-07T01-00-47.965413.json
DELETED
@@ -1,107 +0,0 @@
{
    "config_general": {
        "lighteval_sha": "167773f1d5d1647c60dadc31c9e731ab7dbcbbad",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "model_name": "upstage/SOLAR-0-70b-16bit",
        "model_sha": "43ff16100b9aec3c4d0c56116796149c1c455efc",
        "model_dtype": "torch.float16",
        "model_size": "128.64 GB"
    },
    "results": {
        "harness|drop|3": {
            "em": 0.3555998322147651,
            "em_stderr": 0.004902281518260701,
            "f1": 0.47494337248322493,
            "f1_stderr": 0.004563199491248503
        },
        "harness|gsm8k|5": {
            "acc": 0.45261561789234267,
            "acc_stderr": 0.013710499070934969
        },
        "harness|winogrande|5": {
            "acc": 0.8358326756116812,
            "acc_stderr": 0.010410849775222808
        },
        "all": {
            "em": 0.3555998322147651,
            "em_stderr": 0.004902281518260701,
            "f1": 0.47494337248322493,
            "f1_stderr": 0.004563199491248503,
            "acc": 0.6442241467520119,
            "acc_stderr": 0.012060674423078888
        }
    },
    "versions": {
        "all": 0,
        "harness|drop|3": 1,
        "harness|gsm8k|5": 0,
        "harness|winogrande|5": 0
    },
    "config_tasks": {
        "harness|drop": "LM Harness task",
        "harness|gsm8k": "LM Harness task",
        "harness|winogrande": "LM Harness task"
    },
    "summary_tasks": {
        "harness|drop|3": {
            "hashes": {
                "hash_examples": "1d27416e8324e9a3",
                "hash_full_prompts": "a5513ff9a741b385",
                "hash_input_tokens": "42076f0efbb50aa6",
                "hash_cont_tokens": "66bbffab016875a8"
            },
            "truncated": 3,
            "non_truncated": 9533,
            "padded": 0,
            "non_padded": 9536,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|gsm8k|5": {
            "hashes": {
                "hash_examples": "4c0843a5d99bcfdc",
                "hash_full_prompts": "41d55e83abc0e02d",
                "hash_input_tokens": "bda342e47b5099b2",
                "hash_cont_tokens": "9c09e66eb06ff92d"
            },
            "truncated": 0,
            "non_truncated": 1319,
            "padded": 0,
            "non_padded": 1319,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|winogrande|5": {
            "hashes": {
                "hash_examples": "aada0a176fd81218",
                "hash_full_prompts": "c8655cbd12de8409",
                "hash_input_tokens": "c0bedf98cb040854",
                "hash_cont_tokens": "f08975ad6f2d5864"
            },
            "truncated": 0,
            "non_truncated": 1267,
            "padded": 2432,
            "non_padded": 102,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "9b4d8993161e637d",
            "hash_full_prompts": "08215e527b7e60a5",
            "hash_input_tokens": "a12f3e3c934bd78b",
            "hash_cont_tokens": "bd0aea9b4869cf08"
        },
        "truncated": 3,
        "non_truncated": 12119,
        "padded": 2432,
        "non_padded": 10957,
        "num_truncated_few_shots": 0,
        "total_evaluation_time_secondes": 0
    }
}
upstage/SOLAR-10.7B-Instruct-v1.0/results_2023-12-13T16-15-33.088115.json
DELETED
@@ -1,1409 +0,0 @@
{
    "config_general": {
        "lighteval_sha": "0e4607eff593f6f842aeaa0e5fa6760f58b9d1e9",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 101606.671098492,
        "end_time": 115093.855070956,
        "total_evaluation_time_secondes": "13487.183972463987",
        "model_name": "upstage/SOLAR-10.7B-Instruct-v1.0",
        "model_sha": "d3167df97a44b8632538b32ee8cd887893ea1435",
        "model_dtype": "torch.float16",
        "model_size": "20.08 GB"
    },
    "results": {
        "harness|arc:challenge|25": {
            "acc": 0.6808873720136519,
            "acc_stderr": 0.013621696119173307,
            "acc_norm": 0.7107508532423208,
            "acc_norm_stderr": 0.01325001257939344
        },
        "harness|hellaswag|10": {
            "acc": 0.7070304720175263,
            "acc_stderr": 0.004541944342035901,
            "acc_norm": 0.8815972913762199,
            "acc_norm_stderr": 0.003224240722351317
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|hendrycksTest-anatomy|5": {
            "acc": 0.6148148148148148,
            "acc_stderr": 0.04203921040156279,
            "acc_norm": 0.6148148148148148,
            "acc_norm_stderr": 0.04203921040156279
        },
        "harness|hendrycksTest-astronomy|5": {
            "acc": 0.7368421052631579,
            "acc_stderr": 0.03583496176361072,
            "acc_norm": 0.7368421052631579,
            "acc_norm_stderr": 0.03583496176361072
        },
        "harness|hendrycksTest-business_ethics|5": {
            "acc": 0.74,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "acc": 0.6792452830188679,
            "acc_stderr": 0.02872750295788027,
            "acc_norm": 0.6792452830188679,
            "acc_norm_stderr": 0.02872750295788027
        },
        "harness|hendrycksTest-college_biology|5": {
            "acc": 0.7638888888888888,
            "acc_stderr": 0.03551446610810826,
            "acc_norm": 0.7638888888888888,
            "acc_norm_stderr": 0.03551446610810826
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|hendrycksTest-college_medicine|5": {
            "acc": 0.6647398843930635,
            "acc_stderr": 0.03599586301247077,
            "acc_norm": 0.6647398843930635,
            "acc_norm_stderr": 0.03599586301247077
        },
        "harness|hendrycksTest-college_physics|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.04835503696107223,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.04835503696107223
        },
        "harness|hendrycksTest-computer_security|5": {
            "acc": 0.76,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.76,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "acc": 0.6297872340425532,
            "acc_stderr": 0.03156564682236785,
            "acc_norm": 0.6297872340425532,
            "acc_norm_stderr": 0.03156564682236785
        },
        "harness|hendrycksTest-econometrics|5": {
            "acc": 0.5,
            "acc_stderr": 0.047036043419179864,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.047036043419179864
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "acc": 0.6413793103448275,
            "acc_stderr": 0.039966295748767186,
            "acc_norm": 0.6413793103448275,
            "acc_norm_stderr": 0.039966295748767186
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "acc": 0.47883597883597884,
            "acc_stderr": 0.025728230952130726,
            "acc_norm": 0.47883597883597884,
            "acc_norm_stderr": 0.025728230952130726
        },
        "harness|hendrycksTest-formal_logic|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.044444444444444495,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.044444444444444495
        },
        "harness|hendrycksTest-global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "acc": 0.8032258064516129,
            "acc_stderr": 0.022616409420742025,
            "acc_norm": 0.8032258064516129,
            "acc_norm_stderr": 0.022616409420742025
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "acc": 0.5172413793103449,
            "acc_stderr": 0.03515895551165698,
            "acc_norm": 0.5172413793103449,
            "acc_norm_stderr": 0.03515895551165698
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "acc": 0.72,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.72,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "acc": 0.8,
            "acc_stderr": 0.031234752377721175,
            "acc_norm": 0.8,
            "acc_norm_stderr": 0.031234752377721175
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "acc": 0.8737373737373737,
            "acc_stderr": 0.02366435940288023,
            "acc_norm": 0.8737373737373737,
            "acc_norm_stderr": 0.02366435940288023
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "acc": 0.9067357512953368,
            "acc_stderr": 0.02098685459328973,
            "acc_norm": 0.9067357512953368,
            "acc_norm_stderr": 0.02098685459328973
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "acc": 0.6615384615384615,
            "acc_stderr": 0.023991500500313036,
            "acc_norm": 0.6615384615384615,
            "acc_norm_stderr": 0.023991500500313036
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "acc": 0.3814814814814815,
            "acc_stderr": 0.029616718927497593,
            "acc_norm": 0.3814814814814815,
            "acc_norm_stderr": 0.029616718927497593
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "acc": 0.7184873949579832,
            "acc_stderr": 0.02921354941437217,
            "acc_norm": 0.7184873949579832,
            "acc_norm_stderr": 0.02921354941437217
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "acc": 0.3708609271523179,
            "acc_stderr": 0.03943966699183629,
            "acc_norm": 0.3708609271523179,
            "acc_norm_stderr": 0.03943966699183629
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "acc": 0.8477064220183487,
            "acc_stderr": 0.015405084393157074,
            "acc_norm": 0.8477064220183487,
            "acc_norm_stderr": 0.015405084393157074
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.03388857118502325,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.03388857118502325
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "acc": 0.8480392156862745,
            "acc_stderr": 0.0251956584289318,
            "acc_norm": 0.8480392156862745,
            "acc_norm_stderr": 0.0251956584289318
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "acc": 0.8565400843881856,
            "acc_stderr": 0.022818291821017012,
            "acc_norm": 0.8565400843881856,
            "acc_norm_stderr": 0.022818291821017012
        },
        "harness|hendrycksTest-human_aging|5": {
            "acc": 0.6816143497757847,
            "acc_stderr": 0.03126580522513713,
            "acc_norm": 0.6816143497757847,
            "acc_norm_stderr": 0.03126580522513713
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "acc": 0.7480916030534351,
            "acc_stderr": 0.03807387116306086,
            "acc_norm": 0.7480916030534351,
            "acc_norm_stderr": 0.03807387116306086
        },
        "harness|hendrycksTest-international_law|5": {
            "acc": 0.7768595041322314,
            "acc_stderr": 0.03800754475228733,
            "acc_norm": 0.7768595041322314,
            "acc_norm_stderr": 0.03800754475228733
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "acc": 0.8055555555555556,
            "acc_stderr": 0.038260763248848646,
            "acc_norm": 0.8055555555555556,
            "acc_norm_stderr": 0.038260763248848646
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "acc": 0.754601226993865,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.754601226993865,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|hendrycksTest-machine_learning|5": {
            "acc": 0.44642857142857145,
            "acc_stderr": 0.047184714852195886,
            "acc_norm": 0.44642857142857145,
            "acc_norm_stderr": 0.047184714852195886
        },
        "harness|hendrycksTest-management|5": {
            "acc": 0.8252427184466019,
            "acc_stderr": 0.03760178006026621,
            "acc_norm": 0.8252427184466019,
            "acc_norm_stderr": 0.03760178006026621
        },
        "harness|hendrycksTest-marketing|5": {
            "acc": 0.8589743589743589,
            "acc_stderr": 0.02280138253459753,
            "acc_norm": 0.8589743589743589,
            "acc_norm_stderr": 0.02280138253459753
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "acc": 0.71,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "acc": 0.8033205619412516,
            "acc_stderr": 0.014214138556913917,
            "acc_norm": 0.8033205619412516,
            "acc_norm_stderr": 0.014214138556913917
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "acc": 0.7601156069364162,
            "acc_stderr": 0.022989592543123567,
            "acc_norm": 0.7601156069364162,
            "acc_norm_stderr": 0.022989592543123567
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "acc": 0.39329608938547483,
            "acc_stderr": 0.016337268694270112,
            "acc_norm": 0.39329608938547483,
            "acc_norm_stderr": 0.016337268694270112
        },
        "harness|hendrycksTest-nutrition|5": {
            "acc": 0.7581699346405228,
            "acc_stderr": 0.024518195641879334,
            "acc_norm": 0.7581699346405228,
            "acc_norm_stderr": 0.024518195641879334
        },
        "harness|hendrycksTest-philosophy|5": {
            "acc": 0.729903536977492,
            "acc_stderr": 0.02521804037341062,
            "acc_norm": 0.729903536977492,
            "acc_norm_stderr": 0.02521804037341062
        },
        "harness|hendrycksTest-prehistory|5": {
            "acc": 0.7901234567901234,
            "acc_stderr": 0.02265834408598137,
            "acc_norm": 0.7901234567901234,
            "acc_norm_stderr": 0.02265834408598137
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "acc": 0.49645390070921985,
            "acc_stderr": 0.02982674915328092,
            "acc_norm": 0.49645390070921985,
            "acc_norm_stderr": 0.02982674915328092
        },
        "harness|hendrycksTest-professional_law|5": {
            "acc": 0.4934810951760104,
            "acc_stderr": 0.012769150688867503,
            "acc_norm": 0.4934810951760104,
            "acc_norm_stderr": 0.012769150688867503
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "acc": 0.7389705882352942,
            "acc_stderr": 0.026679252270103135,
            "acc_norm": 0.7389705882352942,
            "acc_norm_stderr": 0.026679252270103135
        },
        "harness|hendrycksTest-professional_psychology|5": {
            "acc": 0.6911764705882353,
            "acc_stderr": 0.018690850273595294,
            "acc_norm": 0.6911764705882353,
            "acc_norm_stderr": 0.018690850273595294
        },
        "harness|hendrycksTest-public_relations|5": {
            "acc": 0.6909090909090909,
            "acc_stderr": 0.044262946482000985,
            "acc_norm": 0.6909090909090909,
            "acc_norm_stderr": 0.044262946482000985
        },
        "harness|hendrycksTest-security_studies|5": {
            "acc": 0.7346938775510204,
            "acc_stderr": 0.0282638899437846,
            "acc_norm": 0.7346938775510204,
            "acc_norm_stderr": 0.0282638899437846
        },
        "harness|hendrycksTest-sociology|5": {
            "acc": 0.8407960199004975,
            "acc_stderr": 0.02587064676616913,
            "acc_norm": 0.8407960199004975,
            "acc_norm_stderr": 0.02587064676616913
        },
        "harness|hendrycksTest-us_foreign_policy|5": {
            "acc": 0.9,
            "acc_stderr": 0.030151134457776334,
            "acc_norm": 0.9,
            "acc_norm_stderr": 0.030151134457776334
        },
        "harness|hendrycksTest-virology|5": {
            "acc": 0.5843373493975904,
            "acc_stderr": 0.03836722176598052,
            "acc_norm": 0.5843373493975904,
            "acc_norm_stderr": 0.03836722176598052
        },
        "harness|hendrycksTest-world_religions|5": {
            "acc": 0.7894736842105263,
            "acc_stderr": 0.03126781714663179,
            "acc_norm": 0.7894736842105263,
            "acc_norm_stderr": 0.03126781714663179
        },
        "harness|truthfulqa:mc|0": {
            "mc1": 0.5667074663402693,
            "mc1_stderr": 0.017347024450107485,
            "mc2": 0.7142943510205136,
            "mc2_stderr": 0.015024530295000761
        },
        "harness|winogrande|5": {
            "acc": 0.8358326756116812,
            "acc_stderr": 0.01041084977522279
        },
        "harness|gsm8k|5": {
            "acc": 0.6474601971190296,
            "acc_stderr": 0.013159909755930337
        },
        "all": {
            "acc": 0.6657586984797939,
            "acc_stderr": 0.03165995758526614,
            "acc_norm": 0.6666511531376961,
            "acc_norm_stderr": 0.0323050384069596,
            "mc1": 0.5667074663402693,
            "mc1_stderr": 0.017347024450107485,
            "mc2": 0.7142943510205136,
            "mc2_stderr": 0.015024530295000761
        }
    },
    "versions": {
        "all": 0,
        "harness|arc:challenge|25": 0,
        "harness|gsm8k|5": 0,
        "harness|hellaswag|10": 0,
        "harness|hendrycksTest-abstract_algebra|5": 1,
        "harness|hendrycksTest-anatomy|5": 1,
        "harness|hendrycksTest-astronomy|5": 1,
        "harness|hendrycksTest-business_ethics|5": 1,
        "harness|hendrycksTest-clinical_knowledge|5": 1,
        "harness|hendrycksTest-college_biology|5": 1,
        "harness|hendrycksTest-college_chemistry|5": 1,
        "harness|hendrycksTest-college_computer_science|5": 1,
        "harness|hendrycksTest-college_mathematics|5": 1,
        "harness|hendrycksTest-college_medicine|5": 1,
        "harness|hendrycksTest-college_physics|5": 1,
        "harness|hendrycksTest-computer_security|5": 1,
        "harness|hendrycksTest-conceptual_physics|5": 1,
        "harness|hendrycksTest-econometrics|5": 1,
        "harness|hendrycksTest-electrical_engineering|5": 1,
        "harness|hendrycksTest-elementary_mathematics|5": 1,
        "harness|hendrycksTest-formal_logic|5": 1,
        "harness|hendrycksTest-global_facts|5": 1,
        "harness|hendrycksTest-high_school_biology|5": 1,
        "harness|hendrycksTest-high_school_chemistry|5": 1,
        "harness|hendrycksTest-high_school_computer_science|5": 1,
        "harness|hendrycksTest-high_school_european_history|5": 1,
        "harness|hendrycksTest-high_school_geography|5": 1,
        "harness|hendrycksTest-high_school_government_and_politics|5": 1,
        "harness|hendrycksTest-high_school_macroeconomics|5": 1,
        "harness|hendrycksTest-high_school_mathematics|5": 1,
        "harness|hendrycksTest-high_school_microeconomics|5": 1,
        "harness|hendrycksTest-high_school_physics|5": 1,
        "harness|hendrycksTest-high_school_psychology|5": 1,
        "harness|hendrycksTest-high_school_statistics|5": 1,
        "harness|hendrycksTest-high_school_us_history|5": 1,
        "harness|hendrycksTest-high_school_world_history|5": 1,
        "harness|hendrycksTest-human_aging|5": 1,
        "harness|hendrycksTest-human_sexuality|5": 1,
        "harness|hendrycksTest-international_law|5": 1,
        "harness|hendrycksTest-jurisprudence|5": 1,
        "harness|hendrycksTest-logical_fallacies|5": 1,
        "harness|hendrycksTest-machine_learning|5": 1,
        "harness|hendrycksTest-management|5": 1,
        "harness|hendrycksTest-marketing|5": 1,
        "harness|hendrycksTest-medical_genetics|5": 1,
        "harness|hendrycksTest-miscellaneous|5": 1,
        "harness|hendrycksTest-moral_disputes|5": 1,
        "harness|hendrycksTest-moral_scenarios|5": 1,
        "harness|hendrycksTest-nutrition|5": 1,
        "harness|hendrycksTest-philosophy|5": 1,
        "harness|hendrycksTest-prehistory|5": 1,
        "harness|hendrycksTest-professional_accounting|5": 1,
        "harness|hendrycksTest-professional_law|5": 1,
        "harness|hendrycksTest-professional_medicine|5": 1,
        "harness|hendrycksTest-professional_psychology|5": 1,
        "harness|hendrycksTest-public_relations|5": 1,
        "harness|hendrycksTest-security_studies|5": 1,
        "harness|hendrycksTest-sociology|5": 1,
        "harness|hendrycksTest-us_foreign_policy|5": 1,
        "harness|hendrycksTest-virology|5": 1,
        "harness|hendrycksTest-world_religions|5": 1,
        "harness|truthfulqa:mc|0": 1,
        "harness|winogrande|5": 0
    },
    "config_tasks": {
        "harness|arc:challenge": "LM Harness task",
        "harness|gsm8k": "LM Harness task",
        "harness|hellaswag": "LM Harness task",
        "harness|hendrycksTest-abstract_algebra": "LM Harness task",
        "harness|hendrycksTest-anatomy": "LM Harness task",
        "harness|hendrycksTest-astronomy": "LM Harness task",
        "harness|hendrycksTest-business_ethics": "LM Harness task",
        "harness|hendrycksTest-clinical_knowledge": "LM Harness task",
        "harness|hendrycksTest-college_biology": "LM Harness task",
        "harness|hendrycksTest-college_chemistry": "LM Harness task",
        "harness|hendrycksTest-college_computer_science": "LM Harness task",
        "harness|hendrycksTest-college_mathematics": "LM Harness task",
        "harness|hendrycksTest-college_medicine": "LM Harness task",
        "harness|hendrycksTest-college_physics": "LM Harness task",
        "harness|hendrycksTest-computer_security": "LM Harness task",
        "harness|hendrycksTest-conceptual_physics": "LM Harness task",
        "harness|hendrycksTest-econometrics": "LM Harness task",
        "harness|hendrycksTest-electrical_engineering": "LM Harness task",
        "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
        "harness|hendrycksTest-formal_logic": "LM Harness task",
        "harness|hendrycksTest-global_facts": "LM Harness task",
        "harness|hendrycksTest-high_school_biology": "LM Harness task",
        "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
        "harness|hendrycksTest-high_school_computer_science": "LM Harness task",
        "harness|hendrycksTest-high_school_european_history": "LM Harness task",
        "harness|hendrycksTest-high_school_geography": "LM Harness task",
        "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
        "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
        "harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_physics": "LM Harness task",
        "harness|hendrycksTest-high_school_psychology": "LM Harness task",
        "harness|hendrycksTest-high_school_statistics": "LM Harness task",
        "harness|hendrycksTest-high_school_us_history": "LM Harness task",
        "harness|hendrycksTest-high_school_world_history": "LM Harness task",
        "harness|hendrycksTest-human_aging": "LM Harness task",
        "harness|hendrycksTest-human_sexuality": "LM Harness task",
        "harness|hendrycksTest-international_law": "LM Harness task",
        "harness|hendrycksTest-jurisprudence": "LM Harness task",
        "harness|hendrycksTest-logical_fallacies": "LM Harness task",
        "harness|hendrycksTest-machine_learning": "LM Harness task",
        "harness|hendrycksTest-management": "LM Harness task",
        "harness|hendrycksTest-marketing": "LM Harness task",
        "harness|hendrycksTest-medical_genetics": "LM Harness task",
        "harness|hendrycksTest-miscellaneous": "LM Harness task",
        "harness|hendrycksTest-moral_disputes": "LM Harness task",
        "harness|hendrycksTest-moral_scenarios": "LM Harness task",
        "harness|hendrycksTest-nutrition": "LM Harness task",
        "harness|hendrycksTest-philosophy": "LM Harness task",
        "harness|hendrycksTest-prehistory": "LM Harness task",
        "harness|hendrycksTest-professional_accounting": "LM Harness task",
        "harness|hendrycksTest-professional_law": "LM Harness task",
        "harness|hendrycksTest-professional_medicine": "LM Harness task",
        "harness|hendrycksTest-professional_psychology": "LM Harness task",
        "harness|hendrycksTest-public_relations": "LM Harness task",
        "harness|hendrycksTest-security_studies": "LM Harness task",
        "harness|hendrycksTest-sociology": "LM Harness task",
        "harness|hendrycksTest-us_foreign_policy": "LM Harness task",
        "harness|hendrycksTest-virology": "LM Harness task",
        "harness|hendrycksTest-world_religions": "LM Harness task",
        "harness|truthfulqa:mc": "LM Harness task",
        "harness|winogrande": "LM Harness task"
    },
    "summary_tasks": {
        "harness|arc:challenge|25": {
            "hashes": {
                "hash_examples": "17b0cae357c0259e",
                "hash_full_prompts": "045cbb916e5145c6",
                "hash_input_tokens": "9bcd0d1d37471713",
                "hash_cont_tokens": "289aa98c400841d8"
            },
            "truncated": 0,
            "non_truncated": 1172,
            "padded": 4670,
            "non_padded": 17,
            "effective_few_shots": 25.0,
            "num_truncated_few_shots": 0
        },
        "harness|hellaswag|10": {
            "hashes": {
                "hash_examples": "e1768ecb99d7ecf0",
                "hash_full_prompts": "0b4c16983130f84f",
                "hash_input_tokens": "80b8c6d79740318e",
                "hash_cont_tokens": "ac460260c3e6efc9"
            },
            "truncated": 0,
            "non_truncated": 10042,
            "padded": 40101,
            "non_padded": 67,
            "effective_few_shots": 10.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "hashes": {
                "hash_examples": "280f9f325b40559a",
                "hash_full_prompts": "2f776a367d23aea2",
                "hash_input_tokens": "b813d36287c6556c",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-anatomy|5": {
            "hashes": {
                "hash_examples": "2f83a4f1cab4ba18",
                "hash_full_prompts": "516f74bef25df620",
                "hash_input_tokens": "09dc2380497f7a47",
                "hash_cont_tokens": "a52a4f60d98cbe5c"
            },
            "truncated": 0,
            "non_truncated": 135,
            "padded": 540,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-astronomy|5": {
            "hashes": {
                "hash_examples": "7d587b908da4d762",
                "hash_full_prompts": "faf4e80f65de93ca",
                "hash_input_tokens": "68ca3220b0fdd1f3",
                "hash_cont_tokens": "10f7d8eeba97841d"
            },
            "truncated": 0,
            "non_truncated": 152,
            "padded": 608,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-business_ethics|5": {
            "hashes": {
                "hash_examples": "33e51740670de686",
                "hash_full_prompts": "db01c3ef8e1479d4",
                "hash_input_tokens": "bd14ef1320de241e",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "hashes": {
                "hash_examples": "f3366dbe7eefffa4",
                "hash_full_prompts": "49654f71d94b65c3",
                "hash_input_tokens": "d96186ab98017c43",
                "hash_cont_tokens": "edef9975ba9165b5"
            },
            "truncated": 0,
            "non_truncated": 265,
            "padded": 1060,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_biology|5": {
            "hashes": {
                "hash_examples": "ca2b6753a0193e7f",
                "hash_full_prompts": "2b460b75f1fdfefd",
                "hash_input_tokens": "424136b34e95b200",
                "hash_cont_tokens": "0aa103ec6602280b"
            },
            "truncated": 0,
            "non_truncated": 144,
            "padded": 576,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "hashes": {
                "hash_examples": "22ff85f1d34f42d1",
                "hash_full_prompts": "242c9be6da583e95",
                "hash_input_tokens": "8dd8b80e336bbe54",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "hashes": {
                "hash_examples": "30318289d717a5cf",
                "hash_full_prompts": "ed2bdb4e87c4b371",
                "hash_input_tokens": "145d4cef8ca2261d",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "hashes": {
                "hash_examples": "4944d1f0b6b5d911",
                "hash_full_prompts": "770bc4281c973190",
                "hash_input_tokens": "561995d32d2b25c4",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_medicine|5": {
            "hashes": {
                "hash_examples": "dd69cc33381275af",
                "hash_full_prompts": "ad2a53e5250ab46e",
                "hash_input_tokens": "6a258a9d4418599c",
                "hash_cont_tokens": "1979021dbc698754"
            },
            "truncated": 0,
            "non_truncated": 173,
            "padded": 692,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_physics|5": {
            "hashes": {
                "hash_examples": "875dd26d22655b0d",
                "hash_full_prompts": "833a0d7b55aed500",
                "hash_input_tokens": "fa5e0d5b5f97b66a",
                "hash_cont_tokens": "7cf7fe2bab00acbd"
            },
            "truncated": 0,
            "non_truncated": 102,
            "padded": 408,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-computer_security|5": {
            "hashes": {
                "hash_examples": "006451eedc0ededb",
                "hash_full_prompts": "94034c97e85d8f46",
                "hash_input_tokens": "07d27397edfae492",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "hashes": {
                "hash_examples": "8874ece872d2ca4c",
                "hash_full_prompts": "e40d15a34640d6fa",
                "hash_input_tokens": "da5e6c3c8eb17da6",
                "hash_cont_tokens": "903f64eed2b0d217"
            },
            "truncated": 0,
            "non_truncated": 235,
            "padded": 940,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-econometrics|5": {
            "hashes": {
                "hash_examples": "64d3623b0bfaa43f",
                "hash_full_prompts": "612f340fae41338d",
                "hash_input_tokens": "f6ba8e358bdb523e",
                "hash_cont_tokens": "721ae6c5302c4bf2"
            },
            "truncated": 0,
            "non_truncated": 114,
            "padded": 456,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "hashes": {
                "hash_examples": "e98f51780c674d7e",
                "hash_full_prompts": "10275b312d812ae6",
                "hash_input_tokens": "b2459da4c5ca8590",
                "hash_cont_tokens": "15a738960ed3e587"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 575,
            "non_padded": 5,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "hashes": {
                "hash_examples": "fc48208a5ac1c0ce",
                "hash_full_prompts": "5ec274c6c82aca23",
                "hash_input_tokens": "0b969d9ad706a13a",
                "hash_cont_tokens": "c96470462fc71683"
            },
            "truncated": 0,
            "non_truncated": 378,
            "padded": 1512,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-formal_logic|5": {
            "hashes": {
                "hash_examples": "5a6525665f63ea72",
                "hash_full_prompts": "07b92638c4a6b500",
                "hash_input_tokens": "02bc3eb5f90da86e",
                "hash_cont_tokens": "0e1ce025c9d6ee7e"
            },
            "truncated": 0,
            "non_truncated": 126,
            "padded": 504,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-global_facts|5": {
            "hashes": {
                "hash_examples": "371d70d743b2b89b",
                "hash_full_prompts": "332fdee50a1921b4",
                "hash_input_tokens": "3d5106918bcbeb43",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "hashes": {
                "hash_examples": "a79e1018b1674052",
                "hash_full_prompts": "e624e26ede922561",
                "hash_input_tokens": "7b089392db2dabbd",
                "hash_cont_tokens": "e34d57f7d3c4ca16"
            },
            "truncated": 0,
            "non_truncated": 310,
            "padded": 1240,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "hashes": {
                "hash_examples": "44bfc25c389f0e03",
                "hash_full_prompts": "0e3e5f5d9246482a",
                "hash_input_tokens": "ba90b2ffed1c067d",
                "hash_cont_tokens": "e8482d44df4b3740"
            },
            "truncated": 0,
            "non_truncated": 203,
            "padded": 812,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "hashes": {
                "hash_examples": "8b8cdb1084f24169",
                "hash_full_prompts": "c00487e67c1813cc",
                "hash_input_tokens": "60eeec309ef0717f",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "hashes": {
                "hash_examples": "11cd32d0ef440171",
                "hash_full_prompts": "318f4513c537c6bf",
                "hash_input_tokens": "5e5e8bf3808e0ead",
                "hash_cont_tokens": "d63e679a49418339"
            },
            "truncated": 0,
            "non_truncated": 165,
            "padded": 656,
            "non_padded": 4,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "hashes": {
                "hash_examples": "b60019b9e80b642f",
                "hash_full_prompts": "ee5789fcc1a81b1e",
                "hash_input_tokens": "4da9b741d4e7ea78",
                "hash_cont_tokens": "d78483e286d06f1a"
            },
            "truncated": 0,
            "non_truncated": 198,
            "padded": 792,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "hashes": {
                "hash_examples": "d221ec983d143dc3",
                "hash_full_prompts": "ac42d888e1ce1155",
                "hash_input_tokens": "acb4bc872ac86ed7",
                "hash_cont_tokens": "691cdff71ff5fe57"
            },
            "truncated": 0,
            "non_truncated": 193,
            "padded": 772,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "hashes": {
                "hash_examples": "59c2915cacfd3fbb",
                "hash_full_prompts": "c6bd9d25158abd0e",
                "hash_input_tokens": "840fc6403eb69ab0",
                "hash_cont_tokens": "d5ad4c5bdca967ad"
            },
            "truncated": 0,
            "non_truncated": 390,
            "padded": 1560,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "hashes": {
                "hash_examples": "1f8ac897608de342",
                "hash_full_prompts": "5d88f41fc2d643a8",
                "hash_input_tokens": "3629a7f2cd17faeb",
                "hash_cont_tokens": "8f631ca5687dd0d4"
            },
            "truncated": 0,
            "non_truncated": 270,
            "padded": 1080,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "hashes": {
                "hash_examples": "ead6a0f2f6c83370",
                "hash_full_prompts": "bfc393381298609e",
                "hash_input_tokens": "6846f684260e3997",
                "hash_cont_tokens": "7321048a28451473"
            },
            "truncated": 0,
            "non_truncated": 238,
            "padded": 952,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "hashes": {
                "hash_examples": "c3f2025990afec64",
                "hash_full_prompts": "fc78b4997e436734",
                "hash_input_tokens": "85aee25d6bdad94a",
                "hash_cont_tokens": "bb137581f269861c"
            },
            "truncated": 0,
            "non_truncated": 151,
            "padded": 604,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "hashes": {
                "hash_examples": "21f8aab618f6d636",
                "hash_full_prompts": "d5c76aa40b9dbc43",
                "hash_input_tokens": "290b66d6d666a35f",
                "hash_cont_tokens": "b455cab2675bd863"
            },
            "truncated": 0,
            "non_truncated": 545,
            "padded": 2180,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "hashes": {
                "hash_examples": "2386a60a11fc5de3",
                "hash_full_prompts": "4c5c8be5aafac432",
                "hash_input_tokens": "a77a7668b437bc82",
                "hash_cont_tokens": "1b3196fec7e58037"
            },
            "truncated": 0,
            "non_truncated": 216,
            "padded": 864,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "hashes": {
                "hash_examples": "74961543be40f04f",
                "hash_full_prompts": "5d5ca4840131ba21",
                "hash_input_tokens": "63548c7fa9ba7a78",
                "hash_cont_tokens": "a331dedc2aa01b3e"
            },
            "truncated": 0,
            "non_truncated": 204,
            "padded": 816,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "hashes": {
                "hash_examples": "2ad2f6b7198b2234",
                "hash_full_prompts": "11845057459afd72",
                "hash_input_tokens": "83c5da18bfa50812",
                "hash_cont_tokens": "d0fbe030b8c8c2bf"
            },
            "truncated": 0,
            "non_truncated": 237,
            "padded": 948,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-human_aging|5": {
            "hashes": {
                "hash_examples": "1a7199dc733e779b",
                "hash_full_prompts": "756b9096b8eaf892",
                "hash_input_tokens": "bebbd11f22006685",
                "hash_cont_tokens": "1dd29c3755494850"
            },
            "truncated": 0,
            "non_truncated": 223,
            "padded": 892,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "hashes": {
                "hash_examples": "7acb8fdad97f88a6",
                "hash_full_prompts": "731a52ff15b8cfdb",
                "hash_input_tokens": "7b85ee9b8ee54f4f",
                "hash_cont_tokens": "c85573f663c10691"
            },
            "truncated": 0,
            "non_truncated": 131,
            "padded": 524,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-international_law|5": {
            "hashes": {
                "hash_examples": "1300bfd0dfc59114",
                "hash_full_prompts": "db2aefbff5eec996",
                "hash_input_tokens": "7bfc55ab7065943e",
                "hash_cont_tokens": "d263804ba918154f"
|
1037 |
-
},
|
1038 |
-
"truncated": 0,
|
1039 |
-
"non_truncated": 121,
|
1040 |
-
"padded": 484,
|
1041 |
-
"non_padded": 0,
|
1042 |
-
"effective_few_shots": 5.0,
|
1043 |
-
"num_truncated_few_shots": 0
|
1044 |
-
},
|
1045 |
-
"harness|hendrycksTest-jurisprudence|5": {
|
1046 |
-
"hashes": {
|
1047 |
-
"hash_examples": "083b1e4904c48dc2",
|
1048 |
-
"hash_full_prompts": "0f89ee3fe03d6a21",
|
1049 |
-
"hash_input_tokens": "69573f1675e053c6",
|
1050 |
-
"hash_cont_tokens": "581986691a84ece8"
|
1051 |
-
},
|
1052 |
-
"truncated": 0,
|
1053 |
-
"non_truncated": 108,
|
1054 |
-
"padded": 432,
|
1055 |
-
"non_padded": 0,
|
1056 |
-
"effective_few_shots": 5.0,
|
1057 |
-
"num_truncated_few_shots": 0
|
1058 |
-
},
|
1059 |
-
"harness|hendrycksTest-logical_fallacies|5": {
|
1060 |
-
"hashes": {
|
1061 |
-
"hash_examples": "709128f9926a634c",
|
1062 |
-
"hash_full_prompts": "98a04b1f8f841069",
|
1063 |
-
"hash_input_tokens": "552324ef20094bdc",
|
1064 |
-
"hash_cont_tokens": "55a858b28bbda458"
|
1065 |
-
},
|
1066 |
-
"truncated": 0,
|
1067 |
-
"non_truncated": 163,
|
1068 |
-
"padded": 652,
|
1069 |
-
"non_padded": 0,
|
1070 |
-
"effective_few_shots": 5.0,
|
1071 |
-
"num_truncated_few_shots": 0
|
1072 |
-
},
|
1073 |
-
"harness|hendrycksTest-machine_learning|5": {
|
1074 |
-
"hashes": {
|
1075 |
-
"hash_examples": "88f22a636029ae47",
|
1076 |
-
"hash_full_prompts": "2e1c8d4b1e0cc921",
|
1077 |
-
"hash_input_tokens": "96449357a7318905",
|
1078 |
-
"hash_cont_tokens": "e99d3d3efd4ac7a3"
|
1079 |
-
},
|
1080 |
-
"truncated": 0,
|
1081 |
-
"non_truncated": 112,
|
1082 |
-
"padded": 448,
|
1083 |
-
"non_padded": 0,
|
1084 |
-
"effective_few_shots": 5.0,
|
1085 |
-
"num_truncated_few_shots": 0
|
1086 |
-
},
|
1087 |
-
"harness|hendrycksTest-management|5": {
|
1088 |
-
"hashes": {
|
1089 |
-
"hash_examples": "8c8a1e07a2151dca",
|
1090 |
-
"hash_full_prompts": "f51611f514b265b0",
|
1091 |
-
"hash_input_tokens": "3b849249168e3b88",
|
1092 |
-
"hash_cont_tokens": "13d9dc56bca34726"
|
1093 |
-
},
|
1094 |
-
"truncated": 0,
|
1095 |
-
"non_truncated": 103,
|
1096 |
-
"padded": 412,
|
1097 |
-
"non_padded": 0,
|
1098 |
-
"effective_few_shots": 5.0,
|
1099 |
-
"num_truncated_few_shots": 0
|
1100 |
-
},
|
1101 |
-
"harness|hendrycksTest-marketing|5": {
|
1102 |
-
"hashes": {
|
1103 |
-
"hash_examples": "2668953431f91e96",
|
1104 |
-
"hash_full_prompts": "77562bef997c7650",
|
1105 |
-
"hash_input_tokens": "af0e186f2756b70d",
|
1106 |
-
"hash_cont_tokens": "2700ea26933916a2"
|
1107 |
-
},
|
1108 |
-
"truncated": 0,
|
1109 |
-
"non_truncated": 234,
|
1110 |
-
"padded": 936,
|
1111 |
-
"non_padded": 0,
|
1112 |
-
"effective_few_shots": 5.0,
|
1113 |
-
"num_truncated_few_shots": 0
|
1114 |
-
},
|
1115 |
-
"harness|hendrycksTest-medical_genetics|5": {
|
1116 |
-
"hashes": {
|
1117 |
-
"hash_examples": "9c2dda34a2ea4fd2",
|
1118 |
-
"hash_full_prompts": "202139046daa118f",
|
1119 |
-
"hash_input_tokens": "9f6a6de16509b6d9",
|
1120 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1121 |
-
},
|
1122 |
-
"truncated": 0,
|
1123 |
-
"non_truncated": 100,
|
1124 |
-
"padded": 400,
|
1125 |
-
"non_padded": 0,
|
1126 |
-
"effective_few_shots": 5.0,
|
1127 |
-
"num_truncated_few_shots": 0
|
1128 |
-
},
|
1129 |
-
"harness|hendrycksTest-miscellaneous|5": {
|
1130 |
-
"hashes": {
|
1131 |
-
"hash_examples": "41adb694024809c2",
|
1132 |
-
"hash_full_prompts": "bffec9fc237bcf93",
|
1133 |
-
"hash_input_tokens": "9194406d589f7c10",
|
1134 |
-
"hash_cont_tokens": "7bf4341c79587250"
|
1135 |
-
},
|
1136 |
-
"truncated": 0,
|
1137 |
-
"non_truncated": 783,
|
1138 |
-
"padded": 3132,
|
1139 |
-
"non_padded": 0,
|
1140 |
-
"effective_few_shots": 5.0,
|
1141 |
-
"num_truncated_few_shots": 0
|
1142 |
-
},
|
1143 |
-
"harness|hendrycksTest-moral_disputes|5": {
|
1144 |
-
"hashes": {
|
1145 |
-
"hash_examples": "3171c13ba3c594c4",
|
1146 |
-
"hash_full_prompts": "170831fc36f1d59e",
|
1147 |
-
"hash_input_tokens": "769486efc74d9f8e",
|
1148 |
-
"hash_cont_tokens": "38a48e9de6976f00"
|
1149 |
-
},
|
1150 |
-
"truncated": 0,
|
1151 |
-
"non_truncated": 346,
|
1152 |
-
"padded": 1384,
|
1153 |
-
"non_padded": 0,
|
1154 |
-
"effective_few_shots": 5.0,
|
1155 |
-
"num_truncated_few_shots": 0
|
1156 |
-
},
|
1157 |
-
"harness|hendrycksTest-moral_scenarios|5": {
|
1158 |
-
"hashes": {
|
1159 |
-
"hash_examples": "9873e077e83e0546",
|
1160 |
-
"hash_full_prompts": "08f4ceba3131a068",
|
1161 |
-
"hash_input_tokens": "a90fd4dd90959dad",
|
1162 |
-
"hash_cont_tokens": "761c4dc187689d89"
|
1163 |
-
},
|
1164 |
-
"truncated": 0,
|
1165 |
-
"non_truncated": 895,
|
1166 |
-
"padded": 3580,
|
1167 |
-
"non_padded": 0,
|
1168 |
-
"effective_few_shots": 5.0,
|
1169 |
-
"num_truncated_few_shots": 0
|
1170 |
-
},
|
1171 |
-
"harness|hendrycksTest-nutrition|5": {
|
1172 |
-
"hashes": {
|
1173 |
-
"hash_examples": "7db1d8142ec14323",
|
1174 |
-
"hash_full_prompts": "4c0e68e3586cb453",
|
1175 |
-
"hash_input_tokens": "1a3b843e66efd29b",
|
1176 |
-
"hash_cont_tokens": "65005bd7d6f6012a"
|
1177 |
-
},
|
1178 |
-
"truncated": 0,
|
1179 |
-
"non_truncated": 306,
|
1180 |
-
"padded": 1224,
|
1181 |
-
"non_padded": 0,
|
1182 |
-
"effective_few_shots": 5.0,
|
1183 |
-
"num_truncated_few_shots": 0
|
1184 |
-
},
|
1185 |
-
"harness|hendrycksTest-philosophy|5": {
|
1186 |
-
"hashes": {
|
1187 |
-
"hash_examples": "9b455b7d72811cc8",
|
1188 |
-
"hash_full_prompts": "e467f822d8a0d3ff",
|
1189 |
-
"hash_input_tokens": "09820001a3d00013",
|
1190 |
-
"hash_cont_tokens": "0b47934fb6314dec"
|
1191 |
-
},
|
1192 |
-
"truncated": 0,
|
1193 |
-
"non_truncated": 311,
|
1194 |
-
"padded": 1244,
|
1195 |
-
"non_padded": 0,
|
1196 |
-
"effective_few_shots": 5.0,
|
1197 |
-
"num_truncated_few_shots": 0
|
1198 |
-
},
|
1199 |
-
"harness|hendrycksTest-prehistory|5": {
|
1200 |
-
"hashes": {
|
1201 |
-
"hash_examples": "8be90d0f538f1560",
|
1202 |
-
"hash_full_prompts": "152187949bcd0921",
|
1203 |
-
"hash_input_tokens": "7c4ec364ce2768c7",
|
1204 |
-
"hash_cont_tokens": "3f20acd855ee0a29"
|
1205 |
-
},
|
1206 |
-
"truncated": 0,
|
1207 |
-
"non_truncated": 324,
|
1208 |
-
"padded": 1296,
|
1209 |
-
"non_padded": 0,
|
1210 |
-
"effective_few_shots": 5.0,
|
1211 |
-
"num_truncated_few_shots": 0
|
1212 |
-
},
|
1213 |
-
"harness|hendrycksTest-professional_accounting|5": {
|
1214 |
-
"hashes": {
|
1215 |
-
"hash_examples": "8d377597916cd07e",
|
1216 |
-
"hash_full_prompts": "0eb7345d6144ee0d",
|
1217 |
-
"hash_input_tokens": "ced0534574d0ae3f",
|
1218 |
-
"hash_cont_tokens": "8f122ba881355d4b"
|
1219 |
-
},
|
1220 |
-
"truncated": 0,
|
1221 |
-
"non_truncated": 282,
|
1222 |
-
"padded": 1128,
|
1223 |
-
"non_padded": 0,
|
1224 |
-
"effective_few_shots": 5.0,
|
1225 |
-
"num_truncated_few_shots": 0
|
1226 |
-
},
|
1227 |
-
"harness|hendrycksTest-professional_law|5": {
|
1228 |
-
"hashes": {
|
1229 |
-
"hash_examples": "cd9dbc52b3c932d6",
|
1230 |
-
"hash_full_prompts": "36ac764272bfb182",
|
1231 |
-
"hash_input_tokens": "bcbdbbde22ec73e3",
|
1232 |
-
"hash_cont_tokens": "90d5df417c4d3fd3"
|
1233 |
-
},
|
1234 |
-
"truncated": 0,
|
1235 |
-
"non_truncated": 1534,
|
1236 |
-
"padded": 6136,
|
1237 |
-
"non_padded": 0,
|
1238 |
-
"effective_few_shots": 5.0,
|
1239 |
-
"num_truncated_few_shots": 0
|
1240 |
-
},
|
1241 |
-
"harness|hendrycksTest-professional_medicine|5": {
|
1242 |
-
"hashes": {
|
1243 |
-
"hash_examples": "b20e4e816c1e383e",
|
1244 |
-
"hash_full_prompts": "7b8d69ea2acaf2f7",
|
1245 |
-
"hash_input_tokens": "c54d753563114d45",
|
1246 |
-
"hash_cont_tokens": "4a2d2988884f7f70"
|
1247 |
-
},
|
1248 |
-
"truncated": 0,
|
1249 |
-
"non_truncated": 272,
|
1250 |
-
"padded": 1088,
|
1251 |
-
"non_padded": 0,
|
1252 |
-
"effective_few_shots": 5.0,
|
1253 |
-
"num_truncated_few_shots": 0
|
1254 |
-
},
|
1255 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
1256 |
-
"hashes": {
|
1257 |
-
"hash_examples": "d45b73b22f9cc039",
|
1258 |
-
"hash_full_prompts": "fe8937e9ffc99771",
|
1259 |
-
"hash_input_tokens": "b75dc55c0e32fa52",
|
1260 |
-
"hash_cont_tokens": "e0a952cb8a9c81de"
|
1261 |
-
},
|
1262 |
-
"truncated": 0,
|
1263 |
-
"non_truncated": 612,
|
1264 |
-
"padded": 2448,
|
1265 |
-
"non_padded": 0,
|
1266 |
-
"effective_few_shots": 5.0,
|
1267 |
-
"num_truncated_few_shots": 0
|
1268 |
-
},
|
1269 |
-
"harness|hendrycksTest-public_relations|5": {
|
1270 |
-
"hashes": {
|
1271 |
-
"hash_examples": "0d25072e1761652a",
|
1272 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
1273 |
-
"hash_input_tokens": "5ccdc8ec8db99622",
|
1274 |
-
"hash_cont_tokens": "1fa77a8dff3922b8"
|
1275 |
-
},
|
1276 |
-
"truncated": 0,
|
1277 |
-
"non_truncated": 110,
|
1278 |
-
"padded": 440,
|
1279 |
-
"non_padded": 0,
|
1280 |
-
"effective_few_shots": 5.0,
|
1281 |
-
"num_truncated_few_shots": 0
|
1282 |
-
},
|
1283 |
-
"harness|hendrycksTest-security_studies|5": {
|
1284 |
-
"hashes": {
|
1285 |
-
"hash_examples": "62bb8197e63d60d4",
|
1286 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
1287 |
-
"hash_input_tokens": "ca8497342e5b1d57",
|
1288 |
-
"hash_cont_tokens": "81fc9cb3cbdd52db"
|
1289 |
-
},
|
1290 |
-
"truncated": 0,
|
1291 |
-
"non_truncated": 245,
|
1292 |
-
"padded": 980,
|
1293 |
-
"non_padded": 0,
|
1294 |
-
"effective_few_shots": 5.0,
|
1295 |
-
"num_truncated_few_shots": 0
|
1296 |
-
},
|
1297 |
-
"harness|hendrycksTest-sociology|5": {
|
1298 |
-
"hashes": {
|
1299 |
-
"hash_examples": "e7959df87dea8672",
|
1300 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
1301 |
-
"hash_input_tokens": "069c76424fbd3dab",
|
1302 |
-
"hash_cont_tokens": "2a0493252ed2cf43"
|
1303 |
-
},
|
1304 |
-
"truncated": 0,
|
1305 |
-
"non_truncated": 201,
|
1306 |
-
"padded": 804,
|
1307 |
-
"non_padded": 0,
|
1308 |
-
"effective_few_shots": 5.0,
|
1309 |
-
"num_truncated_few_shots": 0
|
1310 |
-
},
|
1311 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
1312 |
-
"hashes": {
|
1313 |
-
"hash_examples": "4a56a01ddca44dca",
|
1314 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
1315 |
-
"hash_input_tokens": "a7e393a626169576",
|
1316 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1317 |
-
},
|
1318 |
-
"truncated": 0,
|
1319 |
-
"non_truncated": 100,
|
1320 |
-
"padded": 400,
|
1321 |
-
"non_padded": 0,
|
1322 |
-
"effective_few_shots": 5.0,
|
1323 |
-
"num_truncated_few_shots": 0
|
1324 |
-
},
|
1325 |
-
"harness|hendrycksTest-virology|5": {
|
1326 |
-
"hashes": {
|
1327 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
1328 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
1329 |
-
"hash_input_tokens": "bf99dc973e3a650d",
|
1330 |
-
"hash_cont_tokens": "5ab892d003b00c98"
|
1331 |
-
},
|
1332 |
-
"truncated": 0,
|
1333 |
-
"non_truncated": 166,
|
1334 |
-
"padded": 664,
|
1335 |
-
"non_padded": 0,
|
1336 |
-
"effective_few_shots": 5.0,
|
1337 |
-
"num_truncated_few_shots": 0
|
1338 |
-
},
|
1339 |
-
"harness|hendrycksTest-world_religions|5": {
|
1340 |
-
"hashes": {
|
1341 |
-
"hash_examples": "3b29cfaf1a81c379",
|
1342 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
1343 |
-
"hash_input_tokens": "1761cfaf21797065",
|
1344 |
-
"hash_cont_tokens": "15a5e5dbdfbb8568"
|
1345 |
-
},
|
1346 |
-
"truncated": 0,
|
1347 |
-
"non_truncated": 171,
|
1348 |
-
"padded": 684,
|
1349 |
-
"non_padded": 0,
|
1350 |
-
"effective_few_shots": 5.0,
|
1351 |
-
"num_truncated_few_shots": 0
|
1352 |
-
},
|
1353 |
-
"harness|truthfulqa:mc|0": {
|
1354 |
-
"hashes": {
|
1355 |
-
"hash_examples": "23176c0531c7b867",
|
1356 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
1357 |
-
"hash_input_tokens": "298b43914bbdf4ca",
|
1358 |
-
"hash_cont_tokens": "5a8d4bb398b1c3c0"
|
1359 |
-
},
|
1360 |
-
"truncated": 0,
|
1361 |
-
"non_truncated": 817,
|
1362 |
-
"padded": 9996,
|
1363 |
-
"non_padded": 0,
|
1364 |
-
"effective_few_shots": 0.0,
|
1365 |
-
"num_truncated_few_shots": 0
|
1366 |
-
},
|
1367 |
-
"harness|winogrande|5": {
|
1368 |
-
"hashes": {
|
1369 |
-
"hash_examples": "aada0a176fd81218",
|
1370 |
-
"hash_full_prompts": "c8655cbd12de8409",
|
1371 |
-
"hash_input_tokens": "31aa3477d959f771",
|
1372 |
-
"hash_cont_tokens": "618558fb93c0f288"
|
1373 |
-
},
|
1374 |
-
"truncated": 0,
|
1375 |
-
"non_truncated": 1267,
|
1376 |
-
"padded": 2534,
|
1377 |
-
"non_padded": 0,
|
1378 |
-
"effective_few_shots": 5.0,
|
1379 |
-
"num_truncated_few_shots": 0
|
1380 |
-
},
|
1381 |
-
"harness|gsm8k|5": {
|
1382 |
-
"hashes": {
|
1383 |
-
"hash_examples": "4c0843a5d99bcfdc",
|
1384 |
-
"hash_full_prompts": "41d55e83abc0e02d",
|
1385 |
-
"hash_input_tokens": "6af0ae8cfe684f50",
|
1386 |
-
"hash_cont_tokens": "68c8cd5264486757"
|
1387 |
-
},
|
1388 |
-
"truncated": 0,
|
1389 |
-
"non_truncated": 1319,
|
1390 |
-
"padded": 0,
|
1391 |
-
"non_padded": 1319,
|
1392 |
-
"effective_few_shots": 5.0,
|
1393 |
-
"num_truncated_few_shots": 0
|
1394 |
-
}
|
1395 |
-
},
|
1396 |
-
"summary_general": {
|
1397 |
-
"hashes": {
|
1398 |
-
"hash_examples": "3b7fa57a057f9415",
|
1399 |
-
"hash_full_prompts": "63615fc50fc9417c",
|
1400 |
-
"hash_input_tokens": "9c04e828ae29cacc",
|
1401 |
-
"hash_cont_tokens": "9e643dad14bb0f53"
|
1402 |
-
},
|
1403 |
-
"truncated": 0,
|
1404 |
-
"non_truncated": 28659,
|
1405 |
-
"padded": 113460,
|
1406 |
-
"non_padded": 1412,
|
1407 |
-
"num_truncated_few_shots": 0
|
1408 |
-
}
|
1409 |
-
}
upstage/SOLAR-10.7B-Instruct-v1.0/results_2023-12-13T21-02-33.929144.json
DELETED
@@ -1,1409 +0,0 @@
{
    "config_general": {
        "lighteval_sha": "0e4607eff593f6f842aeaa0e5fa6760f58b9d1e9",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 118814.185831227,
        "end_time": 132316.094808787,
        "total_evaluation_time_secondes": "13501.908977559986",
        "model_name": "upstage/SOLAR-10.7B-Instruct-v1.0",
        "model_sha": "ea64f234fbebd485309c58c18c188cf60e53cb82",
        "model_dtype": "torch.float16",
        "model_size": "20.08 GB"
    },
    "results": {
        "harness|arc:challenge|25": {
            "acc": 0.6808873720136519,
            "acc_stderr": 0.013621696119173307,
            "acc_norm": 0.7107508532423208,
            "acc_norm_stderr": 0.01325001257939344
        },
        "harness|hellaswag|10": {
            "acc": 0.7070304720175263,
            "acc_stderr": 0.004541944342035901,
            "acc_norm": 0.8815972913762199,
            "acc_norm_stderr": 0.003224240722351317
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|hendrycksTest-anatomy|5": {
            "acc": 0.6148148148148148,
            "acc_stderr": 0.04203921040156279,
            "acc_norm": 0.6148148148148148,
            "acc_norm_stderr": 0.04203921040156279
        },
        "harness|hendrycksTest-astronomy|5": {
            "acc": 0.7368421052631579,
            "acc_stderr": 0.03583496176361072,
            "acc_norm": 0.7368421052631579,
            "acc_norm_stderr": 0.03583496176361072
        },
        "harness|hendrycksTest-business_ethics|5": {
            "acc": 0.74,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "acc": 0.6792452830188679,
            "acc_stderr": 0.02872750295788027,
            "acc_norm": 0.6792452830188679,
            "acc_norm_stderr": 0.02872750295788027
        },
        "harness|hendrycksTest-college_biology|5": {
            "acc": 0.7638888888888888,
            "acc_stderr": 0.03551446610810826,
            "acc_norm": 0.7638888888888888,
            "acc_norm_stderr": 0.03551446610810826
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|hendrycksTest-college_medicine|5": {
            "acc": 0.6647398843930635,
            "acc_stderr": 0.03599586301247077,
            "acc_norm": 0.6647398843930635,
            "acc_norm_stderr": 0.03599586301247077
        },
        "harness|hendrycksTest-college_physics|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.04835503696107223,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.04835503696107223
        },
        "harness|hendrycksTest-computer_security|5": {
            "acc": 0.76,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.76,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "acc": 0.6297872340425532,
            "acc_stderr": 0.03156564682236785,
            "acc_norm": 0.6297872340425532,
            "acc_norm_stderr": 0.03156564682236785
        },
        "harness|hendrycksTest-econometrics|5": {
            "acc": 0.5,
            "acc_stderr": 0.047036043419179864,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.047036043419179864
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "acc": 0.6413793103448275,
            "acc_stderr": 0.039966295748767186,
            "acc_norm": 0.6413793103448275,
            "acc_norm_stderr": 0.039966295748767186
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "acc": 0.47883597883597884,
            "acc_stderr": 0.025728230952130726,
            "acc_norm": 0.47883597883597884,
            "acc_norm_stderr": 0.025728230952130726
        },
        "harness|hendrycksTest-formal_logic|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.044444444444444495,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.044444444444444495
        },
        "harness|hendrycksTest-global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "acc": 0.8032258064516129,
            "acc_stderr": 0.022616409420742025,
            "acc_norm": 0.8032258064516129,
            "acc_norm_stderr": 0.022616409420742025
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "acc": 0.5172413793103449,
            "acc_stderr": 0.03515895551165698,
            "acc_norm": 0.5172413793103449,
            "acc_norm_stderr": 0.03515895551165698
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "acc": 0.72,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.72,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "acc": 0.8,
            "acc_stderr": 0.031234752377721175,
            "acc_norm": 0.8,
            "acc_norm_stderr": 0.031234752377721175
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "acc": 0.8737373737373737,
            "acc_stderr": 0.02366435940288023,
            "acc_norm": 0.8737373737373737,
            "acc_norm_stderr": 0.02366435940288023
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "acc": 0.9067357512953368,
            "acc_stderr": 0.02098685459328973,
            "acc_norm": 0.9067357512953368,
            "acc_norm_stderr": 0.02098685459328973
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "acc": 0.6615384615384615,
            "acc_stderr": 0.023991500500313036,
            "acc_norm": 0.6615384615384615,
            "acc_norm_stderr": 0.023991500500313036
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "acc": 0.3814814814814815,
            "acc_stderr": 0.029616718927497593,
            "acc_norm": 0.3814814814814815,
            "acc_norm_stderr": 0.029616718927497593
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "acc": 0.7184873949579832,
            "acc_stderr": 0.02921354941437217,
            "acc_norm": 0.7184873949579832,
            "acc_norm_stderr": 0.02921354941437217
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "acc": 0.3708609271523179,
            "acc_stderr": 0.03943966699183629,
            "acc_norm": 0.3708609271523179,
            "acc_norm_stderr": 0.03943966699183629
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "acc": 0.8477064220183487,
            "acc_stderr": 0.015405084393157074,
            "acc_norm": 0.8477064220183487,
            "acc_norm_stderr": 0.015405084393157074
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.03388857118502325,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.03388857118502325
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "acc": 0.8480392156862745,
            "acc_stderr": 0.0251956584289318,
            "acc_norm": 0.8480392156862745,
            "acc_norm_stderr": 0.0251956584289318
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "acc": 0.8565400843881856,
            "acc_stderr": 0.022818291821017012,
            "acc_norm": 0.8565400843881856,
            "acc_norm_stderr": 0.022818291821017012
        },
        "harness|hendrycksTest-human_aging|5": {
            "acc": 0.6816143497757847,
            "acc_stderr": 0.03126580522513713,
            "acc_norm": 0.6816143497757847,
            "acc_norm_stderr": 0.03126580522513713
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "acc": 0.7480916030534351,
            "acc_stderr": 0.03807387116306086,
            "acc_norm": 0.7480916030534351,
            "acc_norm_stderr": 0.03807387116306086
        },
        "harness|hendrycksTest-international_law|5": {
            "acc": 0.7768595041322314,
            "acc_stderr": 0.03800754475228733,
            "acc_norm": 0.7768595041322314,
            "acc_norm_stderr": 0.03800754475228733
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "acc": 0.8055555555555556,
            "acc_stderr": 0.038260763248848646,
            "acc_norm": 0.8055555555555556,
            "acc_norm_stderr": 0.038260763248848646
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "acc": 0.754601226993865,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.754601226993865,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|hendrycksTest-machine_learning|5": {
            "acc": 0.44642857142857145,
            "acc_stderr": 0.047184714852195886,
            "acc_norm": 0.44642857142857145,
            "acc_norm_stderr": 0.047184714852195886
        },
        "harness|hendrycksTest-management|5": {
            "acc": 0.8252427184466019,
            "acc_stderr": 0.03760178006026621,
            "acc_norm": 0.8252427184466019,
            "acc_norm_stderr": 0.03760178006026621
        },
        "harness|hendrycksTest-marketing|5": {
            "acc": 0.8589743589743589,
            "acc_stderr": 0.02280138253459753,
            "acc_norm": 0.8589743589743589,
            "acc_norm_stderr": 0.02280138253459753
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "acc": 0.71,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "acc": 0.8033205619412516,
            "acc_stderr": 0.014214138556913917,
            "acc_norm": 0.8033205619412516,
            "acc_norm_stderr": 0.014214138556913917
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "acc": 0.7601156069364162,
            "acc_stderr": 0.022989592543123567,
            "acc_norm": 0.7601156069364162,
            "acc_norm_stderr": 0.022989592543123567
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "acc": 0.39329608938547483,
            "acc_stderr": 0.016337268694270112,
            "acc_norm": 0.39329608938547483,
            "acc_norm_stderr": 0.016337268694270112
        },
        "harness|hendrycksTest-nutrition|5": {
            "acc": 0.7581699346405228,
            "acc_stderr": 0.024518195641879334,
            "acc_norm": 0.7581699346405228,
            "acc_norm_stderr": 0.024518195641879334
        },
        "harness|hendrycksTest-philosophy|5": {
            "acc": 0.729903536977492,
            "acc_stderr": 0.02521804037341062,
            "acc_norm": 0.729903536977492,
            "acc_norm_stderr": 0.02521804037341062
        },
        "harness|hendrycksTest-prehistory|5": {
            "acc": 0.7901234567901234,
            "acc_stderr": 0.02265834408598137,
            "acc_norm": 0.7901234567901234,
            "acc_norm_stderr": 0.02265834408598137
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "acc": 0.49645390070921985,
            "acc_stderr": 0.02982674915328092,
            "acc_norm": 0.49645390070921985,
            "acc_norm_stderr": 0.02982674915328092
        },
        "harness|hendrycksTest-professional_law|5": {
            "acc": 0.4934810951760104,
            "acc_stderr": 0.012769150688867503,
            "acc_norm": 0.4934810951760104,
            "acc_norm_stderr": 0.012769150688867503
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "acc": 0.7389705882352942,
            "acc_stderr": 0.026679252270103135,
            "acc_norm": 0.7389705882352942,
            "acc_norm_stderr": 0.026679252270103135
        },
        "harness|hendrycksTest-professional_psychology|5": {
            "acc": 0.6911764705882353,
            "acc_stderr": 0.018690850273595294,
            "acc_norm": 0.6911764705882353,
            "acc_norm_stderr": 0.018690850273595294
        },
        "harness|hendrycksTest-public_relations|5": {
            "acc": 0.6909090909090909,
            "acc_stderr": 0.044262946482000985,
            "acc_norm": 0.6909090909090909,
            "acc_norm_stderr": 0.044262946482000985
        },
        "harness|hendrycksTest-security_studies|5": {
            "acc": 0.7346938775510204,
            "acc_stderr": 0.0282638899437846,
            "acc_norm": 0.7346938775510204,
            "acc_norm_stderr": 0.0282638899437846
        },
        "harness|hendrycksTest-sociology|5": {
            "acc": 0.8407960199004975,
            "acc_stderr": 0.02587064676616913,
            "acc_norm": 0.8407960199004975,
            "acc_norm_stderr": 0.02587064676616913
        },
        "harness|hendrycksTest-us_foreign_policy|5": {
            "acc": 0.9,
            "acc_stderr": 0.030151134457776334,
            "acc_norm": 0.9,
            "acc_norm_stderr": 0.030151134457776334
        },
        "harness|hendrycksTest-virology|5": {
            "acc": 0.5843373493975904,
            "acc_stderr": 0.03836722176598052,
            "acc_norm": 0.5843373493975904,
            "acc_norm_stderr": 0.03836722176598052
        },
        "harness|hendrycksTest-world_religions|5": {
            "acc": 0.7894736842105263,
            "acc_stderr": 0.03126781714663179,
            "acc_norm": 0.7894736842105263,
            "acc_norm_stderr": 0.03126781714663179
        },
        "harness|truthfulqa:mc|0": {
            "mc1": 0.5667074663402693,
            "mc1_stderr": 0.017347024450107485,
            "mc2": 0.7142943510205136,
            "mc2_stderr": 0.015024530295000761
        },
        "harness|winogrande|5": {
            "acc": 0.8358326756116812,
            "acc_stderr": 0.01041084977522279
        },
        "harness|gsm8k|5": {
            "acc": 0.6474601971190296,
            "acc_stderr": 0.013159909755930337
        },
        "all": {
            "acc": 0.6657586984797939,
            "acc_stderr": 0.03165995758526614,
            "acc_norm": 0.6666511531376961,
            "acc_norm_stderr": 0.0323050384069596,
            "mc1": 0.5667074663402693,
            "mc1_stderr": 0.017347024450107485,
            "mc2": 0.7142943510205136,
            "mc2_stderr": 0.015024530295000761
        }
    },
    "versions": {
        "all": 0,
        "harness|arc:challenge|25": 0,
        "harness|gsm8k|5": 0,
        "harness|hellaswag|10": 0,
        "harness|hendrycksTest-abstract_algebra|5": 1,
        "harness|hendrycksTest-anatomy|5": 1,
        "harness|hendrycksTest-astronomy|5": 1,
        "harness|hendrycksTest-business_ethics|5": 1,
        "harness|hendrycksTest-clinical_knowledge|5": 1,
        "harness|hendrycksTest-college_biology|5": 1,
        "harness|hendrycksTest-college_chemistry|5": 1,
        "harness|hendrycksTest-college_computer_science|5": 1,
        "harness|hendrycksTest-college_mathematics|5": 1,
        "harness|hendrycksTest-college_medicine|5": 1,
        "harness|hendrycksTest-college_physics|5": 1,
        "harness|hendrycksTest-computer_security|5": 1,
        "harness|hendrycksTest-conceptual_physics|5": 1,
        "harness|hendrycksTest-econometrics|5": 1,
        "harness|hendrycksTest-electrical_engineering|5": 1,
        "harness|hendrycksTest-elementary_mathematics|5": 1,
        "harness|hendrycksTest-formal_logic|5": 1,
        "harness|hendrycksTest-global_facts|5": 1,
        "harness|hendrycksTest-high_school_biology|5": 1,
        "harness|hendrycksTest-high_school_chemistry|5": 1,
        "harness|hendrycksTest-high_school_computer_science|5": 1,
        "harness|hendrycksTest-high_school_european_history|5": 1,
        "harness|hendrycksTest-high_school_geography|5": 1,
        "harness|hendrycksTest-high_school_government_and_politics|5": 1,
        "harness|hendrycksTest-high_school_macroeconomics|5": 1,
        "harness|hendrycksTest-high_school_mathematics|5": 1,
        "harness|hendrycksTest-high_school_microeconomics|5": 1,
        "harness|hendrycksTest-high_school_physics|5": 1,
        "harness|hendrycksTest-high_school_psychology|5": 1,
        "harness|hendrycksTest-high_school_statistics|5": 1,
        "harness|hendrycksTest-high_school_us_history|5": 1,
        "harness|hendrycksTest-high_school_world_history|5": 1,
        "harness|hendrycksTest-human_aging|5": 1,
        "harness|hendrycksTest-human_sexuality|5": 1,
        "harness|hendrycksTest-international_law|5": 1,
        "harness|hendrycksTest-jurisprudence|5": 1,
        "harness|hendrycksTest-logical_fallacies|5": 1,
        "harness|hendrycksTest-machine_learning|5": 1,
        "harness|hendrycksTest-management|5": 1,
        "harness|hendrycksTest-marketing|5": 1,
        "harness|hendrycksTest-medical_genetics|5": 1,
        "harness|hendrycksTest-miscellaneous|5": 1,
        "harness|hendrycksTest-moral_disputes|5": 1,
        "harness|hendrycksTest-moral_scenarios|5": 1,
        "harness|hendrycksTest-nutrition|5": 1,
        "harness|hendrycksTest-philosophy|5": 1,
        "harness|hendrycksTest-prehistory|5": 1,
        "harness|hendrycksTest-professional_accounting|5": 1,
        "harness|hendrycksTest-professional_law|5": 1,
        "harness|hendrycksTest-professional_medicine|5": 1,
        "harness|hendrycksTest-professional_psychology|5": 1,
        "harness|hendrycksTest-public_relations|5": 1,
        "harness|hendrycksTest-security_studies|5": 1,
        "harness|hendrycksTest-sociology|5": 1,
        "harness|hendrycksTest-us_foreign_policy|5": 1,
        "harness|hendrycksTest-virology|5": 1,
        "harness|hendrycksTest-world_religions|5": 1,
        "harness|truthfulqa:mc|0": 1,
        "harness|winogrande|5": 0
    },
    "config_tasks": {
        "harness|arc:challenge": "LM Harness task",
        "harness|gsm8k": "LM Harness task",
        "harness|hellaswag": "LM Harness task",
        "harness|hendrycksTest-abstract_algebra": "LM Harness task",
        "harness|hendrycksTest-anatomy": "LM Harness task",
        "harness|hendrycksTest-astronomy": "LM Harness task",
        "harness|hendrycksTest-business_ethics": "LM Harness task",
        "harness|hendrycksTest-clinical_knowledge": "LM Harness task",
        "harness|hendrycksTest-college_biology": "LM Harness task",
        "harness|hendrycksTest-college_chemistry": "LM Harness task",
        "harness|hendrycksTest-college_computer_science": "LM Harness task",
        "harness|hendrycksTest-college_mathematics": "LM Harness task",
        "harness|hendrycksTest-college_medicine": "LM Harness task",
        "harness|hendrycksTest-college_physics": "LM Harness task",
        "harness|hendrycksTest-computer_security": "LM Harness task",
        "harness|hendrycksTest-conceptual_physics": "LM Harness task",
        "harness|hendrycksTest-econometrics": "LM Harness task",
        "harness|hendrycksTest-electrical_engineering": "LM Harness task",
        "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
        "harness|hendrycksTest-formal_logic": "LM Harness task",
        "harness|hendrycksTest-global_facts": "LM Harness task",
        "harness|hendrycksTest-high_school_biology": "LM Harness task",
        "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
        "harness|hendrycksTest-high_school_computer_science": "LM Harness task",
        "harness|hendrycksTest-high_school_european_history": "LM Harness task",
        "harness|hendrycksTest-high_school_geography": "LM Harness task",
        "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
        "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
        "harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_physics": "LM Harness task",
        "harness|hendrycksTest-high_school_psychology": "LM Harness task",
        "harness|hendrycksTest-high_school_statistics": "LM Harness task",
        "harness|hendrycksTest-high_school_us_history": "LM Harness task",
        "harness|hendrycksTest-high_school_world_history": "LM Harness task",
        "harness|hendrycksTest-human_aging": "LM Harness task",
        "harness|hendrycksTest-human_sexuality": "LM Harness task",
        "harness|hendrycksTest-international_law": "LM Harness task",
        "harness|hendrycksTest-jurisprudence": "LM Harness task",
        "harness|hendrycksTest-logical_fallacies": "LM Harness task",
        "harness|hendrycksTest-machine_learning": "LM Harness task",
        "harness|hendrycksTest-management": "LM Harness task",
        "harness|hendrycksTest-marketing": "LM Harness task",
        "harness|hendrycksTest-medical_genetics": "LM Harness task",
        "harness|hendrycksTest-miscellaneous": "LM Harness task",
        "harness|hendrycksTest-moral_disputes": "LM Harness task",
        "harness|hendrycksTest-moral_scenarios": "LM Harness task",
        "harness|hendrycksTest-nutrition": "LM Harness task",
        "harness|hendrycksTest-philosophy": "LM Harness task",
        "harness|hendrycksTest-prehistory": "LM Harness task",
        "harness|hendrycksTest-professional_accounting": "LM Harness task",
        "harness|hendrycksTest-professional_law": "LM Harness task",
        "harness|hendrycksTest-professional_medicine": "LM Harness task",
        "harness|hendrycksTest-professional_psychology": "LM Harness task",
        "harness|hendrycksTest-public_relations": "LM Harness task",
        "harness|hendrycksTest-security_studies": "LM Harness task",
        "harness|hendrycksTest-sociology": "LM Harness task",
        "harness|hendrycksTest-us_foreign_policy": "LM Harness task",
        "harness|hendrycksTest-virology": "LM Harness task",
        "harness|hendrycksTest-world_religions": "LM Harness task",
        "harness|truthfulqa:mc": "LM Harness task",
        "harness|winogrande": "LM Harness task"
    },
    "summary_tasks": {
        "harness|arc:challenge|25": {
            "hashes": {
                "hash_examples": "17b0cae357c0259e",
                "hash_full_prompts": "045cbb916e5145c6",
                "hash_input_tokens": "9bcd0d1d37471713",
                "hash_cont_tokens": "289aa98c400841d8"
            },
            "truncated": 0,
            "non_truncated": 1172,
            "padded": 4670,
            "non_padded": 17,
            "effective_few_shots": 25.0,
            "num_truncated_few_shots": 0
        },
        "harness|hellaswag|10": {
            "hashes": {
                "hash_examples": "e1768ecb99d7ecf0",
                "hash_full_prompts": "0b4c16983130f84f",
                "hash_input_tokens": "80b8c6d79740318e",
                "hash_cont_tokens": "ac460260c3e6efc9"
            },
            "truncated": 0,
            "non_truncated": 10042,
            "padded": 40101,
            "non_padded": 67,
            "effective_few_shots": 10.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "hashes": {
                "hash_examples": "280f9f325b40559a",
                "hash_full_prompts": "2f776a367d23aea2",
                "hash_input_tokens": "b813d36287c6556c",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-anatomy|5": {
            "hashes": {
                "hash_examples": "2f83a4f1cab4ba18",
                "hash_full_prompts": "516f74bef25df620",
                "hash_input_tokens": "09dc2380497f7a47",
                "hash_cont_tokens": "a52a4f60d98cbe5c"
            },
            "truncated": 0,
            "non_truncated": 135,
            "padded": 540,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-astronomy|5": {
            "hashes": {
                "hash_examples": "7d587b908da4d762",
                "hash_full_prompts": "faf4e80f65de93ca",
                "hash_input_tokens": "68ca3220b0fdd1f3",
                "hash_cont_tokens": "10f7d8eeba97841d"
            },
            "truncated": 0,
            "non_truncated": 152,
            "padded": 608,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-business_ethics|5": {
            "hashes": {
                "hash_examples": "33e51740670de686",
                "hash_full_prompts": "db01c3ef8e1479d4",
                "hash_input_tokens": "bd14ef1320de241e",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "hashes": {
                "hash_examples": "f3366dbe7eefffa4",
                "hash_full_prompts": "49654f71d94b65c3",
                "hash_input_tokens": "d96186ab98017c43",
                "hash_cont_tokens": "edef9975ba9165b5"
            },
            "truncated": 0,
            "non_truncated": 265,
            "padded": 1060,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_biology|5": {
            "hashes": {
                "hash_examples": "ca2b6753a0193e7f",
                "hash_full_prompts": "2b460b75f1fdfefd",
                "hash_input_tokens": "424136b34e95b200",
                "hash_cont_tokens": "0aa103ec6602280b"
            },
            "truncated": 0,
            "non_truncated": 144,
            "padded": 576,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "hashes": {
                "hash_examples": "22ff85f1d34f42d1",
                "hash_full_prompts": "242c9be6da583e95",
                "hash_input_tokens": "8dd8b80e336bbe54",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "hashes": {
                "hash_examples": "30318289d717a5cf",
                "hash_full_prompts": "ed2bdb4e87c4b371",
                "hash_input_tokens": "145d4cef8ca2261d",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "hashes": {
                "hash_examples": "4944d1f0b6b5d911",
                "hash_full_prompts": "770bc4281c973190",
                "hash_input_tokens": "561995d32d2b25c4",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_medicine|5": {
            "hashes": {
                "hash_examples": "dd69cc33381275af",
                "hash_full_prompts": "ad2a53e5250ab46e",
                "hash_input_tokens": "6a258a9d4418599c",
                "hash_cont_tokens": "1979021dbc698754"
            },
            "truncated": 0,
            "non_truncated": 173,
            "padded": 692,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_physics|5": {
            "hashes": {
                "hash_examples": "875dd26d22655b0d",
                "hash_full_prompts": "833a0d7b55aed500",
                "hash_input_tokens": "fa5e0d5b5f97b66a",
                "hash_cont_tokens": "7cf7fe2bab00acbd"
            },
            "truncated": 0,
            "non_truncated": 102,
            "padded": 408,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-computer_security|5": {
            "hashes": {
                "hash_examples": "006451eedc0ededb",
                "hash_full_prompts": "94034c97e85d8f46",
                "hash_input_tokens": "07d27397edfae492",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "hashes": {
                "hash_examples": "8874ece872d2ca4c",
                "hash_full_prompts": "e40d15a34640d6fa",
                "hash_input_tokens": "da5e6c3c8eb17da6",
                "hash_cont_tokens": "903f64eed2b0d217"
            },
            "truncated": 0,
            "non_truncated": 235,
            "padded": 940,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-econometrics|5": {
            "hashes": {
                "hash_examples": "64d3623b0bfaa43f",
                "hash_full_prompts": "612f340fae41338d",
                "hash_input_tokens": "f6ba8e358bdb523e",
                "hash_cont_tokens": "721ae6c5302c4bf2"
            },
            "truncated": 0,
            "non_truncated": 114,
            "padded": 456,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "hashes": {
                "hash_examples": "e98f51780c674d7e",
                "hash_full_prompts": "10275b312d812ae6",
                "hash_input_tokens": "b2459da4c5ca8590",
                "hash_cont_tokens": "15a738960ed3e587"
            },
            "truncated": 0,
            "non_truncated": 145,
            "padded": 575,
            "non_padded": 5,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "hashes": {
                "hash_examples": "fc48208a5ac1c0ce",
                "hash_full_prompts": "5ec274c6c82aca23",
                "hash_input_tokens": "0b969d9ad706a13a",
                "hash_cont_tokens": "c96470462fc71683"
            },
            "truncated": 0,
            "non_truncated": 378,
            "padded": 1512,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-formal_logic|5": {
            "hashes": {
                "hash_examples": "5a6525665f63ea72",
                "hash_full_prompts": "07b92638c4a6b500",
                "hash_input_tokens": "02bc3eb5f90da86e",
                "hash_cont_tokens": "0e1ce025c9d6ee7e"
            },
            "truncated": 0,
            "non_truncated": 126,
            "padded": 504,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-global_facts|5": {
            "hashes": {
                "hash_examples": "371d70d743b2b89b",
                "hash_full_prompts": "332fdee50a1921b4",
                "hash_input_tokens": "3d5106918bcbeb43",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "hashes": {
                "hash_examples": "a79e1018b1674052",
                "hash_full_prompts": "e624e26ede922561",
                "hash_input_tokens": "7b089392db2dabbd",
                "hash_cont_tokens": "e34d57f7d3c4ca16"
            },
            "truncated": 0,
            "non_truncated": 310,
            "padded": 1240,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "hashes": {
                "hash_examples": "44bfc25c389f0e03",
                "hash_full_prompts": "0e3e5f5d9246482a",
                "hash_input_tokens": "ba90b2ffed1c067d",
                "hash_cont_tokens": "e8482d44df4b3740"
            },
            "truncated": 0,
            "non_truncated": 203,
            "padded": 812,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "hashes": {
                "hash_examples": "8b8cdb1084f24169",
                "hash_full_prompts": "c00487e67c1813cc",
                "hash_input_tokens": "60eeec309ef0717f",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "hashes": {
                "hash_examples": "11cd32d0ef440171",
                "hash_full_prompts": "318f4513c537c6bf",
                "hash_input_tokens": "5e5e8bf3808e0ead",
                "hash_cont_tokens": "d63e679a49418339"
            },
            "truncated": 0,
            "non_truncated": 165,
            "padded": 656,
            "non_padded": 4,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "hashes": {
                "hash_examples": "b60019b9e80b642f",
                "hash_full_prompts": "ee5789fcc1a81b1e",
                "hash_input_tokens": "4da9b741d4e7ea78",
                "hash_cont_tokens": "d78483e286d06f1a"
            },
            "truncated": 0,
            "non_truncated": 198,
            "padded": 792,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "hashes": {
                "hash_examples": "d221ec983d143dc3",
                "hash_full_prompts": "ac42d888e1ce1155",
                "hash_input_tokens": "acb4bc872ac86ed7",
                "hash_cont_tokens": "691cdff71ff5fe57"
            },
            "truncated": 0,
            "non_truncated": 193,
            "padded": 772,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "hashes": {
                "hash_examples": "59c2915cacfd3fbb",
                "hash_full_prompts": "c6bd9d25158abd0e",
                "hash_input_tokens": "840fc6403eb69ab0",
                "hash_cont_tokens": "d5ad4c5bdca967ad"
            },
            "truncated": 0,
            "non_truncated": 390,
            "padded": 1560,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "hashes": {
                "hash_examples": "1f8ac897608de342",
                "hash_full_prompts": "5d88f41fc2d643a8",
                "hash_input_tokens": "3629a7f2cd17faeb",
                "hash_cont_tokens": "8f631ca5687dd0d4"
            },
            "truncated": 0,
            "non_truncated": 270,
            "padded": 1080,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "hashes": {
                "hash_examples": "ead6a0f2f6c83370",
                "hash_full_prompts": "bfc393381298609e",
                "hash_input_tokens": "6846f684260e3997",
                "hash_cont_tokens": "7321048a28451473"
            },
            "truncated": 0,
            "non_truncated": 238,
            "padded": 952,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "hashes": {
                "hash_examples": "c3f2025990afec64",
                "hash_full_prompts": "fc78b4997e436734",
                "hash_input_tokens": "85aee25d6bdad94a",
                "hash_cont_tokens": "bb137581f269861c"
            },
940 |
-
"truncated": 0,
|
941 |
-
"non_truncated": 151,
|
942 |
-
"padded": 604,
|
943 |
-
"non_padded": 0,
|
944 |
-
"effective_few_shots": 5.0,
|
945 |
-
"num_truncated_few_shots": 0
|
946 |
-
},
|
947 |
-
"harness|hendrycksTest-high_school_psychology|5": {
|
948 |
-
"hashes": {
|
949 |
-
"hash_examples": "21f8aab618f6d636",
|
950 |
-
"hash_full_prompts": "d5c76aa40b9dbc43",
|
951 |
-
"hash_input_tokens": "290b66d6d666a35f",
|
952 |
-
"hash_cont_tokens": "b455cab2675bd863"
|
953 |
-
},
|
954 |
-
"truncated": 0,
|
955 |
-
"non_truncated": 545,
|
956 |
-
"padded": 2180,
|
957 |
-
"non_padded": 0,
|
958 |
-
"effective_few_shots": 5.0,
|
959 |
-
"num_truncated_few_shots": 0
|
960 |
-
},
|
961 |
-
"harness|hendrycksTest-high_school_statistics|5": {
|
962 |
-
"hashes": {
|
963 |
-
"hash_examples": "2386a60a11fc5de3",
|
964 |
-
"hash_full_prompts": "4c5c8be5aafac432",
|
965 |
-
"hash_input_tokens": "a77a7668b437bc82",
|
966 |
-
"hash_cont_tokens": "1b3196fec7e58037"
|
967 |
-
},
|
968 |
-
"truncated": 0,
|
969 |
-
"non_truncated": 216,
|
970 |
-
"padded": 864,
|
971 |
-
"non_padded": 0,
|
972 |
-
"effective_few_shots": 5.0,
|
973 |
-
"num_truncated_few_shots": 0
|
974 |
-
},
|
975 |
-
"harness|hendrycksTest-high_school_us_history|5": {
|
976 |
-
"hashes": {
|
977 |
-
"hash_examples": "74961543be40f04f",
|
978 |
-
"hash_full_prompts": "5d5ca4840131ba21",
|
979 |
-
"hash_input_tokens": "63548c7fa9ba7a78",
|
980 |
-
"hash_cont_tokens": "a331dedc2aa01b3e"
|
981 |
-
},
|
982 |
-
"truncated": 0,
|
983 |
-
"non_truncated": 204,
|
984 |
-
"padded": 816,
|
985 |
-
"non_padded": 0,
|
986 |
-
"effective_few_shots": 5.0,
|
987 |
-
"num_truncated_few_shots": 0
|
988 |
-
},
|
989 |
-
"harness|hendrycksTest-high_school_world_history|5": {
|
990 |
-
"hashes": {
|
991 |
-
"hash_examples": "2ad2f6b7198b2234",
|
992 |
-
"hash_full_prompts": "11845057459afd72",
|
993 |
-
"hash_input_tokens": "83c5da18bfa50812",
|
994 |
-
"hash_cont_tokens": "d0fbe030b8c8c2bf"
|
995 |
-
},
|
996 |
-
"truncated": 0,
|
997 |
-
"non_truncated": 237,
|
998 |
-
"padded": 948,
|
999 |
-
"non_padded": 0,
|
1000 |
-
"effective_few_shots": 5.0,
|
1001 |
-
"num_truncated_few_shots": 0
|
1002 |
-
},
|
1003 |
-
"harness|hendrycksTest-human_aging|5": {
|
1004 |
-
"hashes": {
|
1005 |
-
"hash_examples": "1a7199dc733e779b",
|
1006 |
-
"hash_full_prompts": "756b9096b8eaf892",
|
1007 |
-
"hash_input_tokens": "bebbd11f22006685",
|
1008 |
-
"hash_cont_tokens": "1dd29c3755494850"
|
1009 |
-
},
|
1010 |
-
"truncated": 0,
|
1011 |
-
"non_truncated": 223,
|
1012 |
-
"padded": 892,
|
1013 |
-
"non_padded": 0,
|
1014 |
-
"effective_few_shots": 5.0,
|
1015 |
-
"num_truncated_few_shots": 0
|
1016 |
-
},
|
1017 |
-
"harness|hendrycksTest-human_sexuality|5": {
|
1018 |
-
"hashes": {
|
1019 |
-
"hash_examples": "7acb8fdad97f88a6",
|
1020 |
-
"hash_full_prompts": "731a52ff15b8cfdb",
|
1021 |
-
"hash_input_tokens": "7b85ee9b8ee54f4f",
|
1022 |
-
"hash_cont_tokens": "c85573f663c10691"
|
1023 |
-
},
|
1024 |
-
"truncated": 0,
|
1025 |
-
"non_truncated": 131,
|
1026 |
-
"padded": 524,
|
1027 |
-
"non_padded": 0,
|
1028 |
-
"effective_few_shots": 5.0,
|
1029 |
-
"num_truncated_few_shots": 0
|
1030 |
-
},
|
1031 |
-
"harness|hendrycksTest-international_law|5": {
|
1032 |
-
"hashes": {
|
1033 |
-
"hash_examples": "1300bfd0dfc59114",
|
1034 |
-
"hash_full_prompts": "db2aefbff5eec996",
|
1035 |
-
"hash_input_tokens": "7bfc55ab7065943e",
|
1036 |
-
"hash_cont_tokens": "d263804ba918154f"
|
1037 |
-
},
|
1038 |
-
"truncated": 0,
|
1039 |
-
"non_truncated": 121,
|
1040 |
-
"padded": 484,
|
1041 |
-
"non_padded": 0,
|
1042 |
-
"effective_few_shots": 5.0,
|
1043 |
-
"num_truncated_few_shots": 0
|
1044 |
-
},
|
1045 |
-
"harness|hendrycksTest-jurisprudence|5": {
|
1046 |
-
"hashes": {
|
1047 |
-
"hash_examples": "083b1e4904c48dc2",
|
1048 |
-
"hash_full_prompts": "0f89ee3fe03d6a21",
|
1049 |
-
"hash_input_tokens": "69573f1675e053c6",
|
1050 |
-
"hash_cont_tokens": "581986691a84ece8"
|
1051 |
-
},
|
1052 |
-
"truncated": 0,
|
1053 |
-
"non_truncated": 108,
|
1054 |
-
"padded": 432,
|
1055 |
-
"non_padded": 0,
|
1056 |
-
"effective_few_shots": 5.0,
|
1057 |
-
"num_truncated_few_shots": 0
|
1058 |
-
},
|
1059 |
-
"harness|hendrycksTest-logical_fallacies|5": {
|
1060 |
-
"hashes": {
|
1061 |
-
"hash_examples": "709128f9926a634c",
|
1062 |
-
"hash_full_prompts": "98a04b1f8f841069",
|
1063 |
-
"hash_input_tokens": "552324ef20094bdc",
|
1064 |
-
"hash_cont_tokens": "55a858b28bbda458"
|
1065 |
-
},
|
1066 |
-
"truncated": 0,
|
1067 |
-
"non_truncated": 163,
|
1068 |
-
"padded": 652,
|
1069 |
-
"non_padded": 0,
|
1070 |
-
"effective_few_shots": 5.0,
|
1071 |
-
"num_truncated_few_shots": 0
|
1072 |
-
},
|
1073 |
-
"harness|hendrycksTest-machine_learning|5": {
|
1074 |
-
"hashes": {
|
1075 |
-
"hash_examples": "88f22a636029ae47",
|
1076 |
-
"hash_full_prompts": "2e1c8d4b1e0cc921",
|
1077 |
-
"hash_input_tokens": "96449357a7318905",
|
1078 |
-
"hash_cont_tokens": "e99d3d3efd4ac7a3"
|
1079 |
-
},
|
1080 |
-
"truncated": 0,
|
1081 |
-
"non_truncated": 112,
|
1082 |
-
"padded": 448,
|
1083 |
-
"non_padded": 0,
|
1084 |
-
"effective_few_shots": 5.0,
|
1085 |
-
"num_truncated_few_shots": 0
|
1086 |
-
},
|
1087 |
-
"harness|hendrycksTest-management|5": {
|
1088 |
-
"hashes": {
|
1089 |
-
"hash_examples": "8c8a1e07a2151dca",
|
1090 |
-
"hash_full_prompts": "f51611f514b265b0",
|
1091 |
-
"hash_input_tokens": "3b849249168e3b88",
|
1092 |
-
"hash_cont_tokens": "13d9dc56bca34726"
|
1093 |
-
},
|
1094 |
-
"truncated": 0,
|
1095 |
-
"non_truncated": 103,
|
1096 |
-
"padded": 412,
|
1097 |
-
"non_padded": 0,
|
1098 |
-
"effective_few_shots": 5.0,
|
1099 |
-
"num_truncated_few_shots": 0
|
1100 |
-
},
|
1101 |
-
"harness|hendrycksTest-marketing|5": {
|
1102 |
-
"hashes": {
|
1103 |
-
"hash_examples": "2668953431f91e96",
|
1104 |
-
"hash_full_prompts": "77562bef997c7650",
|
1105 |
-
"hash_input_tokens": "af0e186f2756b70d",
|
1106 |
-
"hash_cont_tokens": "2700ea26933916a2"
|
1107 |
-
},
|
1108 |
-
"truncated": 0,
|
1109 |
-
"non_truncated": 234,
|
1110 |
-
"padded": 936,
|
1111 |
-
"non_padded": 0,
|
1112 |
-
"effective_few_shots": 5.0,
|
1113 |
-
"num_truncated_few_shots": 0
|
1114 |
-
},
|
1115 |
-
"harness|hendrycksTest-medical_genetics|5": {
|
1116 |
-
"hashes": {
|
1117 |
-
"hash_examples": "9c2dda34a2ea4fd2",
|
1118 |
-
"hash_full_prompts": "202139046daa118f",
|
1119 |
-
"hash_input_tokens": "9f6a6de16509b6d9",
|
1120 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1121 |
-
},
|
1122 |
-
"truncated": 0,
|
1123 |
-
"non_truncated": 100,
|
1124 |
-
"padded": 400,
|
1125 |
-
"non_padded": 0,
|
1126 |
-
"effective_few_shots": 5.0,
|
1127 |
-
"num_truncated_few_shots": 0
|
1128 |
-
},
|
1129 |
-
"harness|hendrycksTest-miscellaneous|5": {
|
1130 |
-
"hashes": {
|
1131 |
-
"hash_examples": "41adb694024809c2",
|
1132 |
-
"hash_full_prompts": "bffec9fc237bcf93",
|
1133 |
-
"hash_input_tokens": "9194406d589f7c10",
|
1134 |
-
"hash_cont_tokens": "7bf4341c79587250"
|
1135 |
-
},
|
1136 |
-
"truncated": 0,
|
1137 |
-
"non_truncated": 783,
|
1138 |
-
"padded": 3132,
|
1139 |
-
"non_padded": 0,
|
1140 |
-
"effective_few_shots": 5.0,
|
1141 |
-
"num_truncated_few_shots": 0
|
1142 |
-
},
|
1143 |
-
"harness|hendrycksTest-moral_disputes|5": {
|
1144 |
-
"hashes": {
|
1145 |
-
"hash_examples": "3171c13ba3c594c4",
|
1146 |
-
"hash_full_prompts": "170831fc36f1d59e",
|
1147 |
-
"hash_input_tokens": "769486efc74d9f8e",
|
1148 |
-
"hash_cont_tokens": "38a48e9de6976f00"
|
1149 |
-
},
|
1150 |
-
"truncated": 0,
|
1151 |
-
"non_truncated": 346,
|
1152 |
-
"padded": 1384,
|
1153 |
-
"non_padded": 0,
|
1154 |
-
"effective_few_shots": 5.0,
|
1155 |
-
"num_truncated_few_shots": 0
|
1156 |
-
},
|
1157 |
-
"harness|hendrycksTest-moral_scenarios|5": {
|
1158 |
-
"hashes": {
|
1159 |
-
"hash_examples": "9873e077e83e0546",
|
1160 |
-
"hash_full_prompts": "08f4ceba3131a068",
|
1161 |
-
"hash_input_tokens": "a90fd4dd90959dad",
|
1162 |
-
"hash_cont_tokens": "761c4dc187689d89"
|
1163 |
-
},
|
1164 |
-
"truncated": 0,
|
1165 |
-
"non_truncated": 895,
|
1166 |
-
"padded": 3580,
|
1167 |
-
"non_padded": 0,
|
1168 |
-
"effective_few_shots": 5.0,
|
1169 |
-
"num_truncated_few_shots": 0
|
1170 |
-
},
|
1171 |
-
"harness|hendrycksTest-nutrition|5": {
|
1172 |
-
"hashes": {
|
1173 |
-
"hash_examples": "7db1d8142ec14323",
|
1174 |
-
"hash_full_prompts": "4c0e68e3586cb453",
|
1175 |
-
"hash_input_tokens": "1a3b843e66efd29b",
|
1176 |
-
"hash_cont_tokens": "65005bd7d6f6012a"
|
1177 |
-
},
|
1178 |
-
"truncated": 0,
|
1179 |
-
"non_truncated": 306,
|
1180 |
-
"padded": 1224,
|
1181 |
-
"non_padded": 0,
|
1182 |
-
"effective_few_shots": 5.0,
|
1183 |
-
"num_truncated_few_shots": 0
|
1184 |
-
},
|
1185 |
-
"harness|hendrycksTest-philosophy|5": {
|
1186 |
-
"hashes": {
|
1187 |
-
"hash_examples": "9b455b7d72811cc8",
|
1188 |
-
"hash_full_prompts": "e467f822d8a0d3ff",
|
1189 |
-
"hash_input_tokens": "09820001a3d00013",
|
1190 |
-
"hash_cont_tokens": "0b47934fb6314dec"
|
1191 |
-
},
|
1192 |
-
"truncated": 0,
|
1193 |
-
"non_truncated": 311,
|
1194 |
-
"padded": 1244,
|
1195 |
-
"non_padded": 0,
|
1196 |
-
"effective_few_shots": 5.0,
|
1197 |
-
"num_truncated_few_shots": 0
|
1198 |
-
},
|
1199 |
-
"harness|hendrycksTest-prehistory|5": {
|
1200 |
-
"hashes": {
|
1201 |
-
"hash_examples": "8be90d0f538f1560",
|
1202 |
-
"hash_full_prompts": "152187949bcd0921",
|
1203 |
-
"hash_input_tokens": "7c4ec364ce2768c7",
|
1204 |
-
"hash_cont_tokens": "3f20acd855ee0a29"
|
1205 |
-
},
|
1206 |
-
"truncated": 0,
|
1207 |
-
"non_truncated": 324,
|
1208 |
-
"padded": 1296,
|
1209 |
-
"non_padded": 0,
|
1210 |
-
"effective_few_shots": 5.0,
|
1211 |
-
"num_truncated_few_shots": 0
|
1212 |
-
},
|
1213 |
-
"harness|hendrycksTest-professional_accounting|5": {
|
1214 |
-
"hashes": {
|
1215 |
-
"hash_examples": "8d377597916cd07e",
|
1216 |
-
"hash_full_prompts": "0eb7345d6144ee0d",
|
1217 |
-
"hash_input_tokens": "ced0534574d0ae3f",
|
1218 |
-
"hash_cont_tokens": "8f122ba881355d4b"
|
1219 |
-
},
|
1220 |
-
"truncated": 0,
|
1221 |
-
"non_truncated": 282,
|
1222 |
-
"padded": 1128,
|
1223 |
-
"non_padded": 0,
|
1224 |
-
"effective_few_shots": 5.0,
|
1225 |
-
"num_truncated_few_shots": 0
|
1226 |
-
},
|
1227 |
-
"harness|hendrycksTest-professional_law|5": {
|
1228 |
-
"hashes": {
|
1229 |
-
"hash_examples": "cd9dbc52b3c932d6",
|
1230 |
-
"hash_full_prompts": "36ac764272bfb182",
|
1231 |
-
"hash_input_tokens": "bcbdbbde22ec73e3",
|
1232 |
-
"hash_cont_tokens": "90d5df417c4d3fd3"
|
1233 |
-
},
|
1234 |
-
"truncated": 0,
|
1235 |
-
"non_truncated": 1534,
|
1236 |
-
"padded": 6136,
|
1237 |
-
"non_padded": 0,
|
1238 |
-
"effective_few_shots": 5.0,
|
1239 |
-
"num_truncated_few_shots": 0
|
1240 |
-
},
|
1241 |
-
"harness|hendrycksTest-professional_medicine|5": {
|
1242 |
-
"hashes": {
|
1243 |
-
"hash_examples": "b20e4e816c1e383e",
|
1244 |
-
"hash_full_prompts": "7b8d69ea2acaf2f7",
|
1245 |
-
"hash_input_tokens": "c54d753563114d45",
|
1246 |
-
"hash_cont_tokens": "4a2d2988884f7f70"
|
1247 |
-
},
|
1248 |
-
"truncated": 0,
|
1249 |
-
"non_truncated": 272,
|
1250 |
-
"padded": 1088,
|
1251 |
-
"non_padded": 0,
|
1252 |
-
"effective_few_shots": 5.0,
|
1253 |
-
"num_truncated_few_shots": 0
|
1254 |
-
},
|
1255 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
1256 |
-
"hashes": {
|
1257 |
-
"hash_examples": "d45b73b22f9cc039",
|
1258 |
-
"hash_full_prompts": "fe8937e9ffc99771",
|
1259 |
-
"hash_input_tokens": "b75dc55c0e32fa52",
|
1260 |
-
"hash_cont_tokens": "e0a952cb8a9c81de"
|
1261 |
-
},
|
1262 |
-
"truncated": 0,
|
1263 |
-
"non_truncated": 612,
|
1264 |
-
"padded": 2448,
|
1265 |
-
"non_padded": 0,
|
1266 |
-
"effective_few_shots": 5.0,
|
1267 |
-
"num_truncated_few_shots": 0
|
1268 |
-
},
|
1269 |
-
"harness|hendrycksTest-public_relations|5": {
|
1270 |
-
"hashes": {
|
1271 |
-
"hash_examples": "0d25072e1761652a",
|
1272 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
1273 |
-
"hash_input_tokens": "5ccdc8ec8db99622",
|
1274 |
-
"hash_cont_tokens": "1fa77a8dff3922b8"
|
1275 |
-
},
|
1276 |
-
"truncated": 0,
|
1277 |
-
"non_truncated": 110,
|
1278 |
-
"padded": 440,
|
1279 |
-
"non_padded": 0,
|
1280 |
-
"effective_few_shots": 5.0,
|
1281 |
-
"num_truncated_few_shots": 0
|
1282 |
-
},
|
1283 |
-
"harness|hendrycksTest-security_studies|5": {
|
1284 |
-
"hashes": {
|
1285 |
-
"hash_examples": "62bb8197e63d60d4",
|
1286 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
1287 |
-
"hash_input_tokens": "ca8497342e5b1d57",
|
1288 |
-
"hash_cont_tokens": "81fc9cb3cbdd52db"
|
1289 |
-
},
|
1290 |
-
"truncated": 0,
|
1291 |
-
"non_truncated": 245,
|
1292 |
-
"padded": 980,
|
1293 |
-
"non_padded": 0,
|
1294 |
-
"effective_few_shots": 5.0,
|
1295 |
-
"num_truncated_few_shots": 0
|
1296 |
-
},
|
1297 |
-
"harness|hendrycksTest-sociology|5": {
|
1298 |
-
"hashes": {
|
1299 |
-
"hash_examples": "e7959df87dea8672",
|
1300 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
1301 |
-
"hash_input_tokens": "069c76424fbd3dab",
|
1302 |
-
"hash_cont_tokens": "2a0493252ed2cf43"
|
1303 |
-
},
|
1304 |
-
"truncated": 0,
|
1305 |
-
"non_truncated": 201,
|
1306 |
-
"padded": 804,
|
1307 |
-
"non_padded": 0,
|
1308 |
-
"effective_few_shots": 5.0,
|
1309 |
-
"num_truncated_few_shots": 0
|
1310 |
-
},
|
1311 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
1312 |
-
"hashes": {
|
1313 |
-
"hash_examples": "4a56a01ddca44dca",
|
1314 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
1315 |
-
"hash_input_tokens": "a7e393a626169576",
|
1316 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1317 |
-
},
|
1318 |
-
"truncated": 0,
|
1319 |
-
"non_truncated": 100,
|
1320 |
-
"padded": 400,
|
1321 |
-
"non_padded": 0,
|
1322 |
-
"effective_few_shots": 5.0,
|
1323 |
-
"num_truncated_few_shots": 0
|
1324 |
-
},
|
1325 |
-
"harness|hendrycksTest-virology|5": {
|
1326 |
-
"hashes": {
|
1327 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
1328 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
1329 |
-
"hash_input_tokens": "bf99dc973e3a650d",
|
1330 |
-
"hash_cont_tokens": "5ab892d003b00c98"
|
1331 |
-
},
|
1332 |
-
"truncated": 0,
|
1333 |
-
"non_truncated": 166,
|
1334 |
-
"padded": 664,
|
1335 |
-
"non_padded": 0,
|
1336 |
-
"effective_few_shots": 5.0,
|
1337 |
-
"num_truncated_few_shots": 0
|
1338 |
-
},
|
1339 |
-
"harness|hendrycksTest-world_religions|5": {
|
1340 |
-
"hashes": {
|
1341 |
-
"hash_examples": "3b29cfaf1a81c379",
|
1342 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
1343 |
-
"hash_input_tokens": "1761cfaf21797065",
|
1344 |
-
"hash_cont_tokens": "15a5e5dbdfbb8568"
|
1345 |
-
},
|
1346 |
-
"truncated": 0,
|
1347 |
-
"non_truncated": 171,
|
1348 |
-
"padded": 684,
|
1349 |
-
"non_padded": 0,
|
1350 |
-
"effective_few_shots": 5.0,
|
1351 |
-
"num_truncated_few_shots": 0
|
1352 |
-
},
|
1353 |
-
"harness|truthfulqa:mc|0": {
|
1354 |
-
"hashes": {
|
1355 |
-
"hash_examples": "23176c0531c7b867",
|
1356 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
1357 |
-
"hash_input_tokens": "298b43914bbdf4ca",
|
1358 |
-
"hash_cont_tokens": "5a8d4bb398b1c3c0"
|
1359 |
-
},
|
1360 |
-
"truncated": 0,
|
1361 |
-
"non_truncated": 817,
|
1362 |
-
"padded": 9996,
|
1363 |
-
"non_padded": 0,
|
1364 |
-
"effective_few_shots": 0.0,
|
1365 |
-
"num_truncated_few_shots": 0
|
1366 |
-
},
|
1367 |
-
"harness|winogrande|5": {
|
1368 |
-
"hashes": {
|
1369 |
-
"hash_examples": "aada0a176fd81218",
|
1370 |
-
"hash_full_prompts": "c8655cbd12de8409",
|
1371 |
-
"hash_input_tokens": "31aa3477d959f771",
|
1372 |
-
"hash_cont_tokens": "618558fb93c0f288"
|
1373 |
-
},
|
1374 |
-
"truncated": 0,
|
1375 |
-
"non_truncated": 1267,
|
1376 |
-
"padded": 2534,
|
1377 |
-
"non_padded": 0,
|
1378 |
-
"effective_few_shots": 5.0,
|
1379 |
-
"num_truncated_few_shots": 0
|
1380 |
-
},
|
1381 |
-
"harness|gsm8k|5": {
|
1382 |
-
"hashes": {
|
1383 |
-
"hash_examples": "4c0843a5d99bcfdc",
|
1384 |
-
"hash_full_prompts": "41d55e83abc0e02d",
|
1385 |
-
"hash_input_tokens": "6af0ae8cfe684f50",
|
1386 |
-
"hash_cont_tokens": "68c8cd5264486757"
|
1387 |
-
},
|
1388 |
-
"truncated": 0,
|
1389 |
-
"non_truncated": 1319,
|
1390 |
-
"padded": 0,
|
1391 |
-
"non_padded": 1319,
|
1392 |
-
"effective_few_shots": 5.0,
|
1393 |
-
"num_truncated_few_shots": 0
|
1394 |
-
}
|
1395 |
-
},
|
1396 |
-
"summary_general": {
|
1397 |
-
"hashes": {
|
1398 |
-
"hash_examples": "3b7fa57a057f9415",
|
1399 |
-
"hash_full_prompts": "63615fc50fc9417c",
|
1400 |
-
"hash_input_tokens": "9c04e828ae29cacc",
|
1401 |
-
"hash_cont_tokens": "9e643dad14bb0f53"
|
1402 |
-
},
|
1403 |
-
"truncated": 0,
|
1404 |
-
"non_truncated": 28659,
|
1405 |
-
"padded": 113460,
|
1406 |
-
"non_padded": 1412,
|
1407 |
-
"num_truncated_few_shots": 0
|
1408 |
-
}
|
1409 |
-
}
|
upstage/SOLAR-10.7B-v1.0/results_2023-12-13T16-05-57.212237.json DELETED
@@ -1,1409 +0,0 @@
-{
-    "config_general": {
-        "lighteval_sha": "0e4607eff593f6f842aeaa0e5fa6760f58b9d1e9",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null,
-        "job_id": "",
-        "start_time": 101445.578471752,
-        "end_time": 114516.103064624,
-        "total_evaluation_time_secondes": "13070.524592871996",
-        "model_name": "upstage/SOLAR-10.7B-v1.0",
-        "model_sha": "6e2783822f35c376ea96852fe479faa6a8bf09cb",
-        "model_dtype": "torch.float16",
-        "model_size": "20.08 GB"
-    },
-    "results": {
-        "harness|arc:challenge|25": {
-            "acc": 0.5870307167235495,
-            "acc_stderr": 0.014388344935398324,
-            "acc_norm": 0.6194539249146758,
-            "acc_norm_stderr": 0.014188277712349812
-        },
-        "harness|hellaswag|10": {
-            "acc": 0.6542521410077674,
-            "acc_stderr": 0.0047463946133845395,
-            "acc_norm": 0.8460466042620992,
-            "acc_norm_stderr": 0.0036016648387189156
-        },
-        "harness|hendrycksTest-abstract_algebra|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.048783173121456316
-        },
-        "harness|hendrycksTest-anatomy|5": {
-            "acc": 0.6,
-            "acc_stderr": 0.04232073695151589,
-            "acc_norm": 0.6,
-            "acc_norm_stderr": 0.04232073695151589
-        },
-        "harness|hendrycksTest-astronomy|5": {
-            "acc": 0.75,
-            "acc_stderr": 0.03523807393012047,
-            "acc_norm": 0.75,
-            "acc_norm_stderr": 0.03523807393012047
-        },
-        "harness|hendrycksTest-business_ethics|5": {
-            "acc": 0.67,
-            "acc_stderr": 0.04725815626252607,
-            "acc_norm": 0.67,
-            "acc_norm_stderr": 0.04725815626252607
-        },
-        "harness|hendrycksTest-clinical_knowledge|5": {
-            "acc": 0.7094339622641509,
-            "acc_stderr": 0.027943219989337142,
-            "acc_norm": 0.7094339622641509,
-            "acc_norm_stderr": 0.027943219989337142
-        },
-        "harness|hendrycksTest-college_biology|5": {
-            "acc": 0.7638888888888888,
-            "acc_stderr": 0.03551446610810826,
-            "acc_norm": 0.7638888888888888,
-            "acc_norm_stderr": 0.03551446610810826
-        },
-        "harness|hendrycksTest-college_chemistry|5": {
-            "acc": 0.45,
-            "acc_stderr": 0.05,
-            "acc_norm": 0.45,
-            "acc_norm_stderr": 0.05
-        },
-        "harness|hendrycksTest-college_computer_science|5": {
-            "acc": 0.54,
-            "acc_stderr": 0.05009082659620333,
-            "acc_norm": 0.54,
-            "acc_norm_stderr": 0.05009082659620333
-        },
-        "harness|hendrycksTest-college_mathematics|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.04878317312145632,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.04878317312145632
-        },
-        "harness|hendrycksTest-college_medicine|5": {
-            "acc": 0.6705202312138728,
-            "acc_stderr": 0.03583901754736412,
-            "acc_norm": 0.6705202312138728,
-            "acc_norm_stderr": 0.03583901754736412
-        },
-        "harness|hendrycksTest-college_physics|5": {
-            "acc": 0.35294117647058826,
-            "acc_stderr": 0.04755129616062946,
-            "acc_norm": 0.35294117647058826,
-            "acc_norm_stderr": 0.04755129616062946
-        },
-        "harness|hendrycksTest-computer_security|5": {
-            "acc": 0.74,
-            "acc_stderr": 0.04408440022768078,
-            "acc_norm": 0.74,
-            "acc_norm_stderr": 0.04408440022768078
-        },
-        "harness|hendrycksTest-conceptual_physics|5": {
-            "acc": 0.5829787234042553,
-            "acc_stderr": 0.03223276266711712,
-            "acc_norm": 0.5829787234042553,
-            "acc_norm_stderr": 0.03223276266711712
-        },
-        "harness|hendrycksTest-econometrics|5": {
-            "acc": 0.5,
-            "acc_stderr": 0.047036043419179864,
-            "acc_norm": 0.5,
-            "acc_norm_stderr": 0.047036043419179864
-        },
-        "harness|hendrycksTest-electrical_engineering|5": {
-            "acc": 0.6068965517241379,
-            "acc_stderr": 0.0407032901370707,
-            "acc_norm": 0.6068965517241379,
-            "acc_norm_stderr": 0.0407032901370707
-        },
-        "harness|hendrycksTest-elementary_mathematics|5": {
-            "acc": 0.42592592592592593,
-            "acc_stderr": 0.025467149045469536,
-            "acc_norm": 0.42592592592592593,
-            "acc_norm_stderr": 0.025467149045469536
-        },
-        "harness|hendrycksTest-formal_logic|5": {
-            "acc": 0.4365079365079365,
-            "acc_stderr": 0.04435932892851466,
-            "acc_norm": 0.4365079365079365,
-            "acc_norm_stderr": 0.04435932892851466
-        },
-        "harness|hendrycksTest-global_facts|5": {
-            "acc": 0.32,
-            "acc_stderr": 0.04688261722621504,
-            "acc_norm": 0.32,
-            "acc_norm_stderr": 0.04688261722621504
-        },
-        "harness|hendrycksTest-high_school_biology|5": {
-            "acc": 0.7967741935483871,
-            "acc_stderr": 0.022891687984554963,
-            "acc_norm": 0.7967741935483871,
-            "acc_norm_stderr": 0.022891687984554963
-        },
-        "harness|hendrycksTest-high_school_chemistry|5": {
-            "acc": 0.4827586206896552,
-            "acc_stderr": 0.035158955511656986,
-            "acc_norm": 0.4827586206896552,
-            "acc_norm_stderr": 0.035158955511656986
-        },
-        "harness|hendrycksTest-high_school_computer_science|5": {
-            "acc": 0.65,
-            "acc_stderr": 0.0479372485441102,
-            "acc_norm": 0.65,
-            "acc_norm_stderr": 0.0479372485441102
-        },
-        "harness|hendrycksTest-high_school_european_history|5": {
-            "acc": 0.7878787878787878,
-            "acc_stderr": 0.031922715695482995,
-            "acc_norm": 0.7878787878787878,
-            "acc_norm_stderr": 0.031922715695482995
-        },
-        "harness|hendrycksTest-high_school_geography|5": {
-            "acc": 0.8434343434343434,
-            "acc_stderr": 0.025890520358141454,
-            "acc_norm": 0.8434343434343434,
-            "acc_norm_stderr": 0.025890520358141454
-        },
-        "harness|hendrycksTest-high_school_government_and_politics|5": {
-            "acc": 0.9222797927461139,
-            "acc_stderr": 0.019321805557223154,
-            "acc_norm": 0.9222797927461139,
-            "acc_norm_stderr": 0.019321805557223154
-        },
-        "harness|hendrycksTest-high_school_macroeconomics|5": {
-            "acc": 0.6564102564102564,
-            "acc_stderr": 0.024078696580635474,
-            "acc_norm": 0.6564102564102564,
-            "acc_norm_stderr": 0.024078696580635474
-        },
-        "harness|hendrycksTest-high_school_mathematics|5": {
-            "acc": 0.37777777777777777,
-            "acc_stderr": 0.029560707392465715,
-            "acc_norm": 0.37777777777777777,
-            "acc_norm_stderr": 0.029560707392465715
-        },
-        "harness|hendrycksTest-high_school_microeconomics|5": {
-            "acc": 0.6890756302521008,
-            "acc_stderr": 0.030066761582977934,
-            "acc_norm": 0.6890756302521008,
-            "acc_norm_stderr": 0.030066761582977934
-        },
-        "harness|hendrycksTest-high_school_physics|5": {
-            "acc": 0.3509933774834437,
-            "acc_stderr": 0.03896981964257375,
-            "acc_norm": 0.3509933774834437,
-            "acc_norm_stderr": 0.03896981964257375
-        },
-        "harness|hendrycksTest-high_school_psychology|5": {
-            "acc": 0.8422018348623853,
-            "acc_stderr": 0.015630022970092434,
-            "acc_norm": 0.8422018348623853,
-            "acc_norm_stderr": 0.015630022970092434
-        },
-        "harness|hendrycksTest-high_school_statistics|5": {
-            "acc": 0.6111111111111112,
-            "acc_stderr": 0.033247089118091176,
-            "acc_norm": 0.6111111111111112,
-            "acc_norm_stderr": 0.033247089118091176
-        },
-        "harness|hendrycksTest-high_school_us_history|5": {
-            "acc": 0.8529411764705882,
-            "acc_stderr": 0.024857478080250454,
-            "acc_norm": 0.8529411764705882,
-            "acc_norm_stderr": 0.024857478080250454
-        },
-        "harness|hendrycksTest-high_school_world_history|5": {
-            "acc": 0.8270042194092827,
-            "acc_stderr": 0.02462156286676842,
-            "acc_norm": 0.8270042194092827,
-            "acc_norm_stderr": 0.02462156286676842
-        },
-        "harness|hendrycksTest-human_aging|5": {
-            "acc": 0.7130044843049327,
-            "acc_stderr": 0.030360379710291947,
-            "acc_norm": 0.7130044843049327,
-            "acc_norm_stderr": 0.030360379710291947
-        },
-        "harness|hendrycksTest-human_sexuality|5": {
-            "acc": 0.7557251908396947,
-            "acc_stderr": 0.03768335959728744,
-            "acc_norm": 0.7557251908396947,
-            "acc_norm_stderr": 0.03768335959728744
-        },
-        "harness|hendrycksTest-international_law|5": {
-            "acc": 0.8016528925619835,
-            "acc_stderr": 0.036401182719909456,
-            "acc_norm": 0.8016528925619835,
-            "acc_norm_stderr": 0.036401182719909456
-        },
-        "harness|hendrycksTest-jurisprudence|5": {
-            "acc": 0.7777777777777778,
-            "acc_stderr": 0.040191074725573483,
-            "acc_norm": 0.7777777777777778,
-            "acc_norm_stderr": 0.040191074725573483
-        },
-        "harness|hendrycksTest-logical_fallacies|5": {
-            "acc": 0.7668711656441718,
-            "acc_stderr": 0.0332201579577674,
-            "acc_norm": 0.7668711656441718,
-            "acc_norm_stderr": 0.0332201579577674
-        },
-        "harness|hendrycksTest-machine_learning|5": {
-            "acc": 0.42857142857142855,
-            "acc_stderr": 0.04697113923010212,
-            "acc_norm": 0.42857142857142855,
-            "acc_norm_stderr": 0.04697113923010212
-        },
-        "harness|hendrycksTest-management|5": {
-            "acc": 0.8155339805825242,
-            "acc_stderr": 0.03840423627288276,
-            "acc_norm": 0.8155339805825242,
-            "acc_norm_stderr": 0.03840423627288276
-        },
-        "harness|hendrycksTest-marketing|5": {
-            "acc": 0.8760683760683761,
-            "acc_stderr": 0.021586494001281382,
-            "acc_norm": 0.8760683760683761,
-            "acc_norm_stderr": 0.021586494001281382
-        },
-        "harness|hendrycksTest-medical_genetics|5": {
-            "acc": 0.75,
-            "acc_stderr": 0.04351941398892446,
-            "acc_norm": 0.75,
-            "acc_norm_stderr": 0.04351941398892446
-        },
-        "harness|hendrycksTest-miscellaneous|5": {
-            "acc": 0.8275862068965517,
-            "acc_stderr": 0.013507943909371803,
-            "acc_norm": 0.8275862068965517,
-            "acc_norm_stderr": 0.013507943909371803
-        },
-        "harness|hendrycksTest-moral_disputes|5": {
-            "acc": 0.7254335260115607,
-            "acc_stderr": 0.024027745155265023,
-            "acc_norm": 0.7254335260115607,
-            "acc_norm_stderr": 0.024027745155265023
-        },
-        "harness|hendrycksTest-moral_scenarios|5": {
-            "acc": 0.25027932960893856,
-            "acc_stderr": 0.01448750085285042,
-            "acc_norm": 0.25027932960893856,
-            "acc_norm_stderr": 0.01448750085285042
-        },
-        "harness|hendrycksTest-nutrition|5": {
-            "acc": 0.7875816993464052,
-            "acc_stderr": 0.02342037547829613,
-            "acc_norm": 0.7875816993464052,
-            "acc_norm_stderr": 0.02342037547829613
-        },
-        "harness|hendrycksTest-philosophy|5": {
-            "acc": 0.6945337620578779,
-            "acc_stderr": 0.02616058445014045,
-            "acc_norm": 0.6945337620578779,
-            "acc_norm_stderr": 0.02616058445014045
-        },
-        "harness|hendrycksTest-prehistory|5": {
-            "acc": 0.7654320987654321,
-            "acc_stderr": 0.023576881744005716,
-            "acc_norm": 0.7654320987654321,
-            "acc_norm_stderr": 0.023576881744005716
-        },
-        "harness|hendrycksTest-professional_accounting|5": {
-            "acc": 0.48226950354609927,
-            "acc_stderr": 0.02980873964223777,
-            "acc_norm": 0.48226950354609927,
-            "acc_norm_stderr": 0.02980873964223777
-        },
-        "harness|hendrycksTest-professional_law|5": {
-            "acc": 0.49608865710560623,
-            "acc_stderr": 0.012769845366441192,
-            "acc_norm": 0.49608865710560623,
-            "acc_norm_stderr": 0.012769845366441192
-        },
-        "harness|hendrycksTest-professional_medicine|5": {
-            "acc": 0.7352941176470589,
-            "acc_stderr": 0.026799562024887664,
-            "acc_norm": 0.7352941176470589,
-            "acc_norm_stderr": 0.026799562024887664
-        },
-        "harness|hendrycksTest-professional_psychology|5": {
-            "acc": 0.7058823529411765,
-            "acc_stderr": 0.018433427649401903,
-            "acc_norm": 0.7058823529411765,
-            "acc_norm_stderr": 0.018433427649401903
-        },
-        "harness|hendrycksTest-public_relations|5": {
-            "acc": 0.7,
-            "acc_stderr": 0.04389311454644286,
-            "acc_norm": 0.7,
-            "acc_norm_stderr": 0.04389311454644286
-        },
-        "harness|hendrycksTest-security_studies|5": {
-            "acc": 0.7714285714285715,
-            "acc_stderr": 0.026882144922307744,
-            "acc_norm": 0.7714285714285715,
-            "acc_norm_stderr": 0.026882144922307744
-        },
-        "harness|hendrycksTest-sociology|5": {
-            "acc": 0.8756218905472637,
-            "acc_stderr": 0.023335401790166327,
-            "acc_norm": 0.8756218905472637,
-            "acc_norm_stderr": 0.023335401790166327
-        },
-        "harness|hendrycksTest-us_foreign_policy|5": {
-            "acc": 0.89,
-            "acc_stderr": 0.03144660377352203,
-            "acc_norm": 0.89,
-            "acc_norm_stderr": 0.03144660377352203
-        },
-        "harness|hendrycksTest-virology|5": {
-            "acc": 0.5301204819277109,
-            "acc_stderr": 0.03885425420866767,
-            "acc_norm": 0.5301204819277109,
-            "acc_norm_stderr": 0.03885425420866767
-        },
-        "harness|hendrycksTest-world_religions|5": {
-            "acc": 0.8304093567251462,
-            "acc_stderr": 0.028782108105401705,
-            "acc_norm": 0.8304093567251462,
-            "acc_norm_stderr": 0.028782108105401705
-        },
-        "harness|truthfulqa:mc|0": {
-            "mc1": 0.3047735618115055,
-            "mc1_stderr": 0.016114124156882455,
-            "mc2": 0.4503940613910581,
-            "mc2_stderr": 0.014223702771748893
-        },
-        "harness|winogrande|5": {
-            "acc": 0.8366219415943172,
-            "acc_stderr": 0.010390695970273764
-        },
-        "harness|gsm8k|5": {
-            "acc": 0.5549658832448825,
-            "acc_stderr": 0.013689011567414202
-        },
-        "all": {
-            "acc": 0.6550126565893573,
-            "acc_stderr": 0.031671818397293244,
-            "acc_norm": 0.6574305406535094,
-            "acc_norm_stderr": 0.03231451555422857,
-            "mc1": 0.3047735618115055,
-            "mc1_stderr": 0.016114124156882455,
-            "mc2": 0.4503940613910581,
-            "mc2_stderr": 0.014223702771748893
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|arc:challenge|25": 0,
-        "harness|gsm8k|5": 0,
-        "harness|hellaswag|10": 0,
-        "harness|hendrycksTest-abstract_algebra|5": 1,
-        "harness|hendrycksTest-anatomy|5": 1,
-        "harness|hendrycksTest-astronomy|5": 1,
-        "harness|hendrycksTest-business_ethics|5": 1,
-        "harness|hendrycksTest-clinical_knowledge|5": 1,
-        "harness|hendrycksTest-college_biology|5": 1,
-        "harness|hendrycksTest-college_chemistry|5": 1,
-        "harness|hendrycksTest-college_computer_science|5": 1,
-        "harness|hendrycksTest-college_mathematics|5": 1,
-        "harness|hendrycksTest-college_medicine|5": 1,
-        "harness|hendrycksTest-college_physics|5": 1,
-        "harness|hendrycksTest-computer_security|5": 1,
-        "harness|hendrycksTest-conceptual_physics|5": 1,
-        "harness|hendrycksTest-econometrics|5": 1,
-        "harness|hendrycksTest-electrical_engineering|5": 1,
-        "harness|hendrycksTest-elementary_mathematics|5": 1,
-        "harness|hendrycksTest-formal_logic|5": 1,
-        "harness|hendrycksTest-global_facts|5": 1,
-        "harness|hendrycksTest-high_school_biology|5": 1,
-        "harness|hendrycksTest-high_school_chemistry|5": 1,
-        "harness|hendrycksTest-high_school_computer_science|5": 1,
-        "harness|hendrycksTest-high_school_european_history|5": 1,
-        "harness|hendrycksTest-high_school_geography|5": 1,
-        "harness|hendrycksTest-high_school_government_and_politics|5": 1,
-        "harness|hendrycksTest-high_school_macroeconomics|5": 1,
-        "harness|hendrycksTest-high_school_mathematics|5": 1,
-        "harness|hendrycksTest-high_school_microeconomics|5": 1,
-        "harness|hendrycksTest-high_school_physics|5": 1,
-        "harness|hendrycksTest-high_school_psychology|5": 1,
-        "harness|hendrycksTest-high_school_statistics|5": 1,
-        "harness|hendrycksTest-high_school_us_history|5": 1,
-        "harness|hendrycksTest-high_school_world_history|5": 1,
-        "harness|hendrycksTest-human_aging|5": 1,
-        "harness|hendrycksTest-human_sexuality|5": 1,
-        "harness|hendrycksTest-international_law|5": 1,
-        "harness|hendrycksTest-jurisprudence|5": 1,
-        "harness|hendrycksTest-logical_fallacies|5": 1,
-        "harness|hendrycksTest-machine_learning|5": 1,
-        "harness|hendrycksTest-management|5": 1,
-        "harness|hendrycksTest-marketing|5": 1,
-        "harness|hendrycksTest-medical_genetics|5": 1,
-        "harness|hendrycksTest-miscellaneous|5": 1,
-        "harness|hendrycksTest-moral_disputes|5": 1,
-        "harness|hendrycksTest-moral_scenarios|5": 1,
-        "harness|hendrycksTest-nutrition|5": 1,
-        "harness|hendrycksTest-philosophy|5": 1,
-        "harness|hendrycksTest-prehistory|5": 1,
-        "harness|hendrycksTest-professional_accounting|5": 1,
-        "harness|hendrycksTest-professional_law|5": 1,
-        "harness|hendrycksTest-professional_medicine|5": 1,
-        "harness|hendrycksTest-professional_psychology|5": 1,
-        "harness|hendrycksTest-public_relations|5": 1,
-        "harness|hendrycksTest-security_studies|5": 1,
-        "harness|hendrycksTest-sociology|5": 1,
-        "harness|hendrycksTest-us_foreign_policy|5": 1,
-        "harness|hendrycksTest-virology|5": 1,
-        "harness|hendrycksTest-world_religions|5": 1,
-        "harness|truthfulqa:mc|0": 1,
-        "harness|winogrande|5": 0
-    },
-    "config_tasks": {
-        "harness|arc:challenge": "LM Harness task",
-        "harness|gsm8k": "LM Harness task",
-        "harness|hellaswag": "LM Harness task",
-        "harness|hendrycksTest-abstract_algebra": "LM Harness task",
-        "harness|hendrycksTest-anatomy": "LM Harness task",
-        "harness|hendrycksTest-astronomy": "LM Harness task",
-        "harness|hendrycksTest-business_ethics": "LM Harness task",
-        "harness|hendrycksTest-clinical_knowledge": "LM Harness task",
-        "harness|hendrycksTest-college_biology": "LM Harness task",
-        "harness|hendrycksTest-college_chemistry": "LM Harness task",
-        "harness|hendrycksTest-college_computer_science": "LM Harness task",
-        "harness|hendrycksTest-college_mathematics": "LM Harness task",
-        "harness|hendrycksTest-college_medicine": "LM Harness task",
-        "harness|hendrycksTest-college_physics": "LM Harness task",
-        "harness|hendrycksTest-computer_security": "LM Harness task",
-        "harness|hendrycksTest-conceptual_physics": "LM Harness task",
-        "harness|hendrycksTest-econometrics": "LM Harness task",
-        "harness|hendrycksTest-electrical_engineering": "LM Harness task",
-        "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
-        "harness|hendrycksTest-formal_logic": "LM Harness task",
-        "harness|hendrycksTest-global_facts": "LM Harness task",
-        "harness|hendrycksTest-high_school_biology": "LM Harness task",
-        "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
-        "harness|hendrycksTest-high_school_computer_science": "LM Harness task",
-        "harness|hendrycksTest-high_school_european_history": "LM Harness task",
-        "harness|hendrycksTest-high_school_geography": "LM Harness task",
-        "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
-        "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
-        "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
-        "harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
-        "harness|hendrycksTest-high_school_physics": "LM Harness task",
-        "harness|hendrycksTest-high_school_psychology": "LM Harness task",
-        "harness|hendrycksTest-high_school_statistics": "LM Harness task",
-        "harness|hendrycksTest-high_school_us_history": "LM Harness task",
-        "harness|hendrycksTest-high_school_world_history": "LM Harness task",
-        "harness|hendrycksTest-human_aging": "LM Harness task",
-        "harness|hendrycksTest-human_sexuality": "LM Harness task",
-        "harness|hendrycksTest-international_law": "LM Harness task",
-        "harness|hendrycksTest-jurisprudence": "LM Harness task",
-        "harness|hendrycksTest-logical_fallacies": "LM Harness task",
-        "harness|hendrycksTest-machine_learning": "LM Harness task",
-        "harness|hendrycksTest-management": "LM Harness task",
-        "harness|hendrycksTest-marketing": "LM Harness task",
-        "harness|hendrycksTest-medical_genetics": "LM Harness task",
-        "harness|hendrycksTest-miscellaneous": "LM Harness task",
-        "harness|hendrycksTest-moral_disputes": "LM Harness task",
-        "harness|hendrycksTest-moral_scenarios": "LM Harness task",
-        "harness|hendrycksTest-nutrition": "LM Harness task",
-        "harness|hendrycksTest-philosophy": "LM Harness task",
-        "harness|hendrycksTest-prehistory": "LM Harness task",
-        "harness|hendrycksTest-professional_accounting": "LM Harness task",
-        "harness|hendrycksTest-professional_law": "LM Harness task",
-        "harness|hendrycksTest-professional_medicine": "LM Harness task",
-        "harness|hendrycksTest-professional_psychology": "LM Harness task",
-        "harness|hendrycksTest-public_relations": "LM Harness task",
-        "harness|hendrycksTest-security_studies": "LM Harness task",
-        "harness|hendrycksTest-sociology": "LM Harness task",
-        "harness|hendrycksTest-us_foreign_policy": "LM Harness task",
-        "harness|hendrycksTest-virology": "LM Harness task",
-        "harness|hendrycksTest-world_religions": "LM Harness task",
-        "harness|truthfulqa:mc": "LM Harness task",
-        "harness|winogrande": "LM Harness task"
-    },
-    "summary_tasks": {
-        "harness|arc:challenge|25": {
-            "hashes": {
-                "hash_examples": "17b0cae357c0259e",
-                "hash_full_prompts": "045cbb916e5145c6",
-                "hash_input_tokens": "9bcd0d1d37471713",
-                "hash_cont_tokens": "289aa98c400841d8"
-            },
-            "truncated": 0,
-            "non_truncated": 1172,
-            "padded": 4670,
-            "non_padded": 17,
-            "effective_few_shots": 25.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hellaswag|10": {
-            "hashes": {
-                "hash_examples": "e1768ecb99d7ecf0",
-                "hash_full_prompts": "0b4c16983130f84f",
-                "hash_input_tokens": "80b8c6d79740318e",
-                "hash_cont_tokens": "ac460260c3e6efc9"
-            },
-            "truncated": 0,
-            "non_truncated": 10042,
-            "padded": 40101,
-            "non_padded": 67,
-            "effective_few_shots": 10.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-abstract_algebra|5": {
-            "hashes": {
-                "hash_examples": "280f9f325b40559a",
-                "hash_full_prompts": "2f776a367d23aea2",
-                "hash_input_tokens": "b813d36287c6556c",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-anatomy|5": {
-            "hashes": {
-                "hash_examples": "2f83a4f1cab4ba18",
-                "hash_full_prompts": "516f74bef25df620",
-                "hash_input_tokens": "09dc2380497f7a47",
-                "hash_cont_tokens": "a52a4f60d98cbe5c"
-            },
-            "truncated": 0,
-            "non_truncated": 135,
-            "padded": 540,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-astronomy|5": {
-            "hashes": {
-                "hash_examples": "7d587b908da4d762",
-                "hash_full_prompts": "faf4e80f65de93ca",
-                "hash_input_tokens": "68ca3220b0fdd1f3",
-                "hash_cont_tokens": "10f7d8eeba97841d"
-            },
-            "truncated": 0,
-            "non_truncated": 152,
-            "padded": 608,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-business_ethics|5": {
-            "hashes": {
-                "hash_examples": "33e51740670de686",
-                "hash_full_prompts": "db01c3ef8e1479d4",
-                "hash_input_tokens": "bd14ef1320de241e",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-clinical_knowledge|5": {
-            "hashes": {
-                "hash_examples": "f3366dbe7eefffa4",
-                "hash_full_prompts": "49654f71d94b65c3",
-                "hash_input_tokens": "d96186ab98017c43",
-                "hash_cont_tokens": "edef9975ba9165b5"
-            },
-            "truncated": 0,
-            "non_truncated": 265,
-            "padded": 1060,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-college_biology|5": {
-            "hashes": {
-                "hash_examples": "ca2b6753a0193e7f",
-                "hash_full_prompts": "2b460b75f1fdfefd",
-                "hash_input_tokens": "424136b34e95b200",
-                "hash_cont_tokens": "0aa103ec6602280b"
-            },
-            "truncated": 0,
-            "non_truncated": 144,
-            "padded": 576,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-college_chemistry|5": {
-            "hashes": {
-                "hash_examples": "22ff85f1d34f42d1",
-                "hash_full_prompts": "242c9be6da583e95",
-                "hash_input_tokens": "8dd8b80e336bbe54",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-college_computer_science|5": {
-            "hashes": {
-                "hash_examples": "30318289d717a5cf",
-                "hash_full_prompts": "ed2bdb4e87c4b371",
-                "hash_input_tokens": "145d4cef8ca2261d",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-college_mathematics|5": {
-            "hashes": {
-                "hash_examples": "4944d1f0b6b5d911",
-                "hash_full_prompts": "770bc4281c973190",
-                "hash_input_tokens": "561995d32d2b25c4",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-college_medicine|5": {
-            "hashes": {
-                "hash_examples": "dd69cc33381275af",
-                "hash_full_prompts": "ad2a53e5250ab46e",
-                "hash_input_tokens": "6a258a9d4418599c",
-                "hash_cont_tokens": "1979021dbc698754"
-            },
-            "truncated": 0,
-            "non_truncated": 173,
-            "padded": 692,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-college_physics|5": {
-            "hashes": {
-                "hash_examples": "875dd26d22655b0d",
-                "hash_full_prompts": "833a0d7b55aed500",
-                "hash_input_tokens": "fa5e0d5b5f97b66a",
-                "hash_cont_tokens": "7cf7fe2bab00acbd"
-            },
-            "truncated": 0,
-            "non_truncated": 102,
-            "padded": 408,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-computer_security|5": {
-            "hashes": {
-                "hash_examples": "006451eedc0ededb",
-                "hash_full_prompts": "94034c97e85d8f46",
-                "hash_input_tokens": "07d27397edfae492",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-conceptual_physics|5": {
-            "hashes": {
-                "hash_examples": "8874ece872d2ca4c",
-                "hash_full_prompts": "e40d15a34640d6fa",
-                "hash_input_tokens": "da5e6c3c8eb17da6",
-                "hash_cont_tokens": "903f64eed2b0d217"
-            },
-            "truncated": 0,
-            "non_truncated": 235,
-            "padded": 940,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-econometrics|5": {
-            "hashes": {
-                "hash_examples": "64d3623b0bfaa43f",
-                "hash_full_prompts": "612f340fae41338d",
-                "hash_input_tokens": "f6ba8e358bdb523e",
-                "hash_cont_tokens": "721ae6c5302c4bf2"
-            },
-            "truncated": 0,
-            "non_truncated": 114,
-            "padded": 456,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-electrical_engineering|5": {
-            "hashes": {
-                "hash_examples": "e98f51780c674d7e",
-                "hash_full_prompts": "10275b312d812ae6",
-                "hash_input_tokens": "b2459da4c5ca8590",
-                "hash_cont_tokens": "15a738960ed3e587"
-            },
-            "truncated": 0,
-            "non_truncated": 145,
-            "padded": 575,
-            "non_padded": 5,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-elementary_mathematics|5": {
-            "hashes": {
-                "hash_examples": "fc48208a5ac1c0ce",
-                "hash_full_prompts": "5ec274c6c82aca23",
-                "hash_input_tokens": "0b969d9ad706a13a",
-                "hash_cont_tokens": "c96470462fc71683"
-            },
-            "truncated": 0,
-            "non_truncated": 378,
-            "padded": 1512,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-formal_logic|5": {
-            "hashes": {
-                "hash_examples": "5a6525665f63ea72",
-                "hash_full_prompts": "07b92638c4a6b500",
-                "hash_input_tokens": "02bc3eb5f90da86e",
-                "hash_cont_tokens": "0e1ce025c9d6ee7e"
-            },
-            "truncated": 0,
-            "non_truncated": 126,
-            "padded": 504,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-global_facts|5": {
-            "hashes": {
-                "hash_examples": "371d70d743b2b89b",
-                "hash_full_prompts": "332fdee50a1921b4",
-                "hash_input_tokens": "3d5106918bcbeb43",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-high_school_biology|5": {
-            "hashes": {
-                "hash_examples": "a79e1018b1674052",
-                "hash_full_prompts": "e624e26ede922561",
-                "hash_input_tokens": "7b089392db2dabbd",
-                "hash_cont_tokens": "e34d57f7d3c4ca16"
-            },
-            "truncated": 0,
-            "non_truncated": 310,
-            "padded": 1240,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-high_school_chemistry|5": {
-            "hashes": {
-                "hash_examples": "44bfc25c389f0e03",
-                "hash_full_prompts": "0e3e5f5d9246482a",
-                "hash_input_tokens": "ba90b2ffed1c067d",
-                "hash_cont_tokens": "e8482d44df4b3740"
-            },
-            "truncated": 0,
-            "non_truncated": 203,
-            "padded": 812,
-            "non_padded": 0,
-            "effective_few_shots": 5.0,
-            "num_truncated_few_shots": 0
-        },
-        "harness|hendrycksTest-high_school_computer_science|5": {
-            "hashes": {
-                "hash_examples": "8b8cdb1084f24169",
-                "hash_full_prompts": "c00487e67c1813cc",
-                "hash_input_tokens": "60eeec309ef0717f",
-                "hash_cont_tokens": "17b868b63507f9a3"
-            },
-            "truncated": 0,
-            "non_truncated": 100,
-            "padded": 400,
-            "non_padded": 0,
|
846 |
-
"effective_few_shots": 5.0,
|
847 |
-
"num_truncated_few_shots": 0
|
848 |
-
},
|
849 |
-
"harness|hendrycksTest-high_school_european_history|5": {
|
850 |
-
"hashes": {
|
851 |
-
"hash_examples": "11cd32d0ef440171",
|
852 |
-
"hash_full_prompts": "318f4513c537c6bf",
|
853 |
-
"hash_input_tokens": "5e5e8bf3808e0ead",
|
854 |
-
"hash_cont_tokens": "d63e679a49418339"
|
855 |
-
},
|
856 |
-
"truncated": 0,
|
857 |
-
"non_truncated": 165,
|
858 |
-
"padded": 656,
|
859 |
-
"non_padded": 4,
|
860 |
-
"effective_few_shots": 5.0,
|
861 |
-
"num_truncated_few_shots": 0
|
862 |
-
},
|
863 |
-
"harness|hendrycksTest-high_school_geography|5": {
|
864 |
-
"hashes": {
|
865 |
-
"hash_examples": "b60019b9e80b642f",
|
866 |
-
"hash_full_prompts": "ee5789fcc1a81b1e",
|
867 |
-
"hash_input_tokens": "4da9b741d4e7ea78",
|
868 |
-
"hash_cont_tokens": "d78483e286d06f1a"
|
869 |
-
},
|
870 |
-
"truncated": 0,
|
871 |
-
"non_truncated": 198,
|
872 |
-
"padded": 792,
|
873 |
-
"non_padded": 0,
|
874 |
-
"effective_few_shots": 5.0,
|
875 |
-
"num_truncated_few_shots": 0
|
876 |
-
},
|
877 |
-
"harness|hendrycksTest-high_school_government_and_politics|5": {
|
878 |
-
"hashes": {
|
879 |
-
"hash_examples": "d221ec983d143dc3",
|
880 |
-
"hash_full_prompts": "ac42d888e1ce1155",
|
881 |
-
"hash_input_tokens": "acb4bc872ac86ed7",
|
882 |
-
"hash_cont_tokens": "691cdff71ff5fe57"
|
883 |
-
},
|
884 |
-
"truncated": 0,
|
885 |
-
"non_truncated": 193,
|
886 |
-
"padded": 772,
|
887 |
-
"non_padded": 0,
|
888 |
-
"effective_few_shots": 5.0,
|
889 |
-
"num_truncated_few_shots": 0
|
890 |
-
},
|
891 |
-
"harness|hendrycksTest-high_school_macroeconomics|5": {
|
892 |
-
"hashes": {
|
893 |
-
"hash_examples": "59c2915cacfd3fbb",
|
894 |
-
"hash_full_prompts": "c6bd9d25158abd0e",
|
895 |
-
"hash_input_tokens": "840fc6403eb69ab0",
|
896 |
-
"hash_cont_tokens": "d5ad4c5bdca967ad"
|
897 |
-
},
|
898 |
-
"truncated": 0,
|
899 |
-
"non_truncated": 390,
|
900 |
-
"padded": 1560,
|
901 |
-
"non_padded": 0,
|
902 |
-
"effective_few_shots": 5.0,
|
903 |
-
"num_truncated_few_shots": 0
|
904 |
-
},
|
905 |
-
"harness|hendrycksTest-high_school_mathematics|5": {
|
906 |
-
"hashes": {
|
907 |
-
"hash_examples": "1f8ac897608de342",
|
908 |
-
"hash_full_prompts": "5d88f41fc2d643a8",
|
909 |
-
"hash_input_tokens": "3629a7f2cd17faeb",
|
910 |
-
"hash_cont_tokens": "8f631ca5687dd0d4"
|
911 |
-
},
|
912 |
-
"truncated": 0,
|
913 |
-
"non_truncated": 270,
|
914 |
-
"padded": 1080,
|
915 |
-
"non_padded": 0,
|
916 |
-
"effective_few_shots": 5.0,
|
917 |
-
"num_truncated_few_shots": 0
|
918 |
-
},
|
919 |
-
"harness|hendrycksTest-high_school_microeconomics|5": {
|
920 |
-
"hashes": {
|
921 |
-
"hash_examples": "ead6a0f2f6c83370",
|
922 |
-
"hash_full_prompts": "bfc393381298609e",
|
923 |
-
"hash_input_tokens": "6846f684260e3997",
|
924 |
-
"hash_cont_tokens": "7321048a28451473"
|
925 |
-
},
|
926 |
-
"truncated": 0,
|
927 |
-
"non_truncated": 238,
|
928 |
-
"padded": 952,
|
929 |
-
"non_padded": 0,
|
930 |
-
"effective_few_shots": 5.0,
|
931 |
-
"num_truncated_few_shots": 0
|
932 |
-
},
|
933 |
-
"harness|hendrycksTest-high_school_physics|5": {
|
934 |
-
"hashes": {
|
935 |
-
"hash_examples": "c3f2025990afec64",
|
936 |
-
"hash_full_prompts": "fc78b4997e436734",
|
937 |
-
"hash_input_tokens": "85aee25d6bdad94a",
|
938 |
-
"hash_cont_tokens": "bb137581f269861c"
|
939 |
-
},
|
940 |
-
"truncated": 0,
|
941 |
-
"non_truncated": 151,
|
942 |
-
"padded": 604,
|
943 |
-
"non_padded": 0,
|
944 |
-
"effective_few_shots": 5.0,
|
945 |
-
"num_truncated_few_shots": 0
|
946 |
-
},
|
947 |
-
"harness|hendrycksTest-high_school_psychology|5": {
|
948 |
-
"hashes": {
|
949 |
-
"hash_examples": "21f8aab618f6d636",
|
950 |
-
"hash_full_prompts": "d5c76aa40b9dbc43",
|
951 |
-
"hash_input_tokens": "290b66d6d666a35f",
|
952 |
-
"hash_cont_tokens": "b455cab2675bd863"
|
953 |
-
},
|
954 |
-
"truncated": 0,
|
955 |
-
"non_truncated": 545,
|
956 |
-
"padded": 2180,
|
957 |
-
"non_padded": 0,
|
958 |
-
"effective_few_shots": 5.0,
|
959 |
-
"num_truncated_few_shots": 0
|
960 |
-
},
|
961 |
-
"harness|hendrycksTest-high_school_statistics|5": {
|
962 |
-
"hashes": {
|
963 |
-
"hash_examples": "2386a60a11fc5de3",
|
964 |
-
"hash_full_prompts": "4c5c8be5aafac432",
|
965 |
-
"hash_input_tokens": "a77a7668b437bc82",
|
966 |
-
"hash_cont_tokens": "1b3196fec7e58037"
|
967 |
-
},
|
968 |
-
"truncated": 0,
|
969 |
-
"non_truncated": 216,
|
970 |
-
"padded": 864,
|
971 |
-
"non_padded": 0,
|
972 |
-
"effective_few_shots": 5.0,
|
973 |
-
"num_truncated_few_shots": 0
|
974 |
-
},
|
975 |
-
"harness|hendrycksTest-high_school_us_history|5": {
|
976 |
-
"hashes": {
|
977 |
-
"hash_examples": "74961543be40f04f",
|
978 |
-
"hash_full_prompts": "5d5ca4840131ba21",
|
979 |
-
"hash_input_tokens": "63548c7fa9ba7a78",
|
980 |
-
"hash_cont_tokens": "a331dedc2aa01b3e"
|
981 |
-
},
|
982 |
-
"truncated": 0,
|
983 |
-
"non_truncated": 204,
|
984 |
-
"padded": 816,
|
985 |
-
"non_padded": 0,
|
986 |
-
"effective_few_shots": 5.0,
|
987 |
-
"num_truncated_few_shots": 0
|
988 |
-
},
|
989 |
-
"harness|hendrycksTest-high_school_world_history|5": {
|
990 |
-
"hashes": {
|
991 |
-
"hash_examples": "2ad2f6b7198b2234",
|
992 |
-
"hash_full_prompts": "11845057459afd72",
|
993 |
-
"hash_input_tokens": "83c5da18bfa50812",
|
994 |
-
"hash_cont_tokens": "d0fbe030b8c8c2bf"
|
995 |
-
},
|
996 |
-
"truncated": 0,
|
997 |
-
"non_truncated": 237,
|
998 |
-
"padded": 948,
|
999 |
-
"non_padded": 0,
|
1000 |
-
"effective_few_shots": 5.0,
|
1001 |
-
"num_truncated_few_shots": 0
|
1002 |
-
},
|
1003 |
-
"harness|hendrycksTest-human_aging|5": {
|
1004 |
-
"hashes": {
|
1005 |
-
"hash_examples": "1a7199dc733e779b",
|
1006 |
-
"hash_full_prompts": "756b9096b8eaf892",
|
1007 |
-
"hash_input_tokens": "bebbd11f22006685",
|
1008 |
-
"hash_cont_tokens": "1dd29c3755494850"
|
1009 |
-
},
|
1010 |
-
"truncated": 0,
|
1011 |
-
"non_truncated": 223,
|
1012 |
-
"padded": 892,
|
1013 |
-
"non_padded": 0,
|
1014 |
-
"effective_few_shots": 5.0,
|
1015 |
-
"num_truncated_few_shots": 0
|
1016 |
-
},
|
1017 |
-
"harness|hendrycksTest-human_sexuality|5": {
|
1018 |
-
"hashes": {
|
1019 |
-
"hash_examples": "7acb8fdad97f88a6",
|
1020 |
-
"hash_full_prompts": "731a52ff15b8cfdb",
|
1021 |
-
"hash_input_tokens": "7b85ee9b8ee54f4f",
|
1022 |
-
"hash_cont_tokens": "c85573f663c10691"
|
1023 |
-
},
|
1024 |
-
"truncated": 0,
|
1025 |
-
"non_truncated": 131,
|
1026 |
-
"padded": 524,
|
1027 |
-
"non_padded": 0,
|
1028 |
-
"effective_few_shots": 5.0,
|
1029 |
-
"num_truncated_few_shots": 0
|
1030 |
-
},
|
1031 |
-
"harness|hendrycksTest-international_law|5": {
|
1032 |
-
"hashes": {
|
1033 |
-
"hash_examples": "1300bfd0dfc59114",
|
1034 |
-
"hash_full_prompts": "db2aefbff5eec996",
|
1035 |
-
"hash_input_tokens": "7bfc55ab7065943e",
|
1036 |
-
"hash_cont_tokens": "d263804ba918154f"
|
1037 |
-
},
|
1038 |
-
"truncated": 0,
|
1039 |
-
"non_truncated": 121,
|
1040 |
-
"padded": 484,
|
1041 |
-
"non_padded": 0,
|
1042 |
-
"effective_few_shots": 5.0,
|
1043 |
-
"num_truncated_few_shots": 0
|
1044 |
-
},
|
1045 |
-
"harness|hendrycksTest-jurisprudence|5": {
|
1046 |
-
"hashes": {
|
1047 |
-
"hash_examples": "083b1e4904c48dc2",
|
1048 |
-
"hash_full_prompts": "0f89ee3fe03d6a21",
|
1049 |
-
"hash_input_tokens": "69573f1675e053c6",
|
1050 |
-
"hash_cont_tokens": "581986691a84ece8"
|
1051 |
-
},
|
1052 |
-
"truncated": 0,
|
1053 |
-
"non_truncated": 108,
|
1054 |
-
"padded": 432,
|
1055 |
-
"non_padded": 0,
|
1056 |
-
"effective_few_shots": 5.0,
|
1057 |
-
"num_truncated_few_shots": 0
|
1058 |
-
},
|
1059 |
-
"harness|hendrycksTest-logical_fallacies|5": {
|
1060 |
-
"hashes": {
|
1061 |
-
"hash_examples": "709128f9926a634c",
|
1062 |
-
"hash_full_prompts": "98a04b1f8f841069",
|
1063 |
-
"hash_input_tokens": "552324ef20094bdc",
|
1064 |
-
"hash_cont_tokens": "55a858b28bbda458"
|
1065 |
-
},
|
1066 |
-
"truncated": 0,
|
1067 |
-
"non_truncated": 163,
|
1068 |
-
"padded": 652,
|
1069 |
-
"non_padded": 0,
|
1070 |
-
"effective_few_shots": 5.0,
|
1071 |
-
"num_truncated_few_shots": 0
|
1072 |
-
},
|
1073 |
-
"harness|hendrycksTest-machine_learning|5": {
|
1074 |
-
"hashes": {
|
1075 |
-
"hash_examples": "88f22a636029ae47",
|
1076 |
-
"hash_full_prompts": "2e1c8d4b1e0cc921",
|
1077 |
-
"hash_input_tokens": "96449357a7318905",
|
1078 |
-
"hash_cont_tokens": "e99d3d3efd4ac7a3"
|
1079 |
-
},
|
1080 |
-
"truncated": 0,
|
1081 |
-
"non_truncated": 112,
|
1082 |
-
"padded": 448,
|
1083 |
-
"non_padded": 0,
|
1084 |
-
"effective_few_shots": 5.0,
|
1085 |
-
"num_truncated_few_shots": 0
|
1086 |
-
},
|
1087 |
-
"harness|hendrycksTest-management|5": {
|
1088 |
-
"hashes": {
|
1089 |
-
"hash_examples": "8c8a1e07a2151dca",
|
1090 |
-
"hash_full_prompts": "f51611f514b265b0",
|
1091 |
-
"hash_input_tokens": "3b849249168e3b88",
|
1092 |
-
"hash_cont_tokens": "13d9dc56bca34726"
|
1093 |
-
},
|
1094 |
-
"truncated": 0,
|
1095 |
-
"non_truncated": 103,
|
1096 |
-
"padded": 412,
|
1097 |
-
"non_padded": 0,
|
1098 |
-
"effective_few_shots": 5.0,
|
1099 |
-
"num_truncated_few_shots": 0
|
1100 |
-
},
|
1101 |
-
"harness|hendrycksTest-marketing|5": {
|
1102 |
-
"hashes": {
|
1103 |
-
"hash_examples": "2668953431f91e96",
|
1104 |
-
"hash_full_prompts": "77562bef997c7650",
|
1105 |
-
"hash_input_tokens": "af0e186f2756b70d",
|
1106 |
-
"hash_cont_tokens": "2700ea26933916a2"
|
1107 |
-
},
|
1108 |
-
"truncated": 0,
|
1109 |
-
"non_truncated": 234,
|
1110 |
-
"padded": 936,
|
1111 |
-
"non_padded": 0,
|
1112 |
-
"effective_few_shots": 5.0,
|
1113 |
-
"num_truncated_few_shots": 0
|
1114 |
-
},
|
1115 |
-
"harness|hendrycksTest-medical_genetics|5": {
|
1116 |
-
"hashes": {
|
1117 |
-
"hash_examples": "9c2dda34a2ea4fd2",
|
1118 |
-
"hash_full_prompts": "202139046daa118f",
|
1119 |
-
"hash_input_tokens": "9f6a6de16509b6d9",
|
1120 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1121 |
-
},
|
1122 |
-
"truncated": 0,
|
1123 |
-
"non_truncated": 100,
|
1124 |
-
"padded": 400,
|
1125 |
-
"non_padded": 0,
|
1126 |
-
"effective_few_shots": 5.0,
|
1127 |
-
"num_truncated_few_shots": 0
|
1128 |
-
},
|
1129 |
-
"harness|hendrycksTest-miscellaneous|5": {
|
1130 |
-
"hashes": {
|
1131 |
-
"hash_examples": "41adb694024809c2",
|
1132 |
-
"hash_full_prompts": "bffec9fc237bcf93",
|
1133 |
-
"hash_input_tokens": "9194406d589f7c10",
|
1134 |
-
"hash_cont_tokens": "7bf4341c79587250"
|
1135 |
-
},
|
1136 |
-
"truncated": 0,
|
1137 |
-
"non_truncated": 783,
|
1138 |
-
"padded": 3132,
|
1139 |
-
"non_padded": 0,
|
1140 |
-
"effective_few_shots": 5.0,
|
1141 |
-
"num_truncated_few_shots": 0
|
1142 |
-
},
|
1143 |
-
"harness|hendrycksTest-moral_disputes|5": {
|
1144 |
-
"hashes": {
|
1145 |
-
"hash_examples": "3171c13ba3c594c4",
|
1146 |
-
"hash_full_prompts": "170831fc36f1d59e",
|
1147 |
-
"hash_input_tokens": "769486efc74d9f8e",
|
1148 |
-
"hash_cont_tokens": "38a48e9de6976f00"
|
1149 |
-
},
|
1150 |
-
"truncated": 0,
|
1151 |
-
"non_truncated": 346,
|
1152 |
-
"padded": 1384,
|
1153 |
-
"non_padded": 0,
|
1154 |
-
"effective_few_shots": 5.0,
|
1155 |
-
"num_truncated_few_shots": 0
|
1156 |
-
},
|
1157 |
-
"harness|hendrycksTest-moral_scenarios|5": {
|
1158 |
-
"hashes": {
|
1159 |
-
"hash_examples": "9873e077e83e0546",
|
1160 |
-
"hash_full_prompts": "08f4ceba3131a068",
|
1161 |
-
"hash_input_tokens": "a90fd4dd90959dad",
|
1162 |
-
"hash_cont_tokens": "761c4dc187689d89"
|
1163 |
-
},
|
1164 |
-
"truncated": 0,
|
1165 |
-
"non_truncated": 895,
|
1166 |
-
"padded": 3580,
|
1167 |
-
"non_padded": 0,
|
1168 |
-
"effective_few_shots": 5.0,
|
1169 |
-
"num_truncated_few_shots": 0
|
1170 |
-
},
|
1171 |
-
"harness|hendrycksTest-nutrition|5": {
|
1172 |
-
"hashes": {
|
1173 |
-
"hash_examples": "7db1d8142ec14323",
|
1174 |
-
"hash_full_prompts": "4c0e68e3586cb453",
|
1175 |
-
"hash_input_tokens": "1a3b843e66efd29b",
|
1176 |
-
"hash_cont_tokens": "65005bd7d6f6012a"
|
1177 |
-
},
|
1178 |
-
"truncated": 0,
|
1179 |
-
"non_truncated": 306,
|
1180 |
-
"padded": 1224,
|
1181 |
-
"non_padded": 0,
|
1182 |
-
"effective_few_shots": 5.0,
|
1183 |
-
"num_truncated_few_shots": 0
|
1184 |
-
},
|
1185 |
-
"harness|hendrycksTest-philosophy|5": {
|
1186 |
-
"hashes": {
|
1187 |
-
"hash_examples": "9b455b7d72811cc8",
|
1188 |
-
"hash_full_prompts": "e467f822d8a0d3ff",
|
1189 |
-
"hash_input_tokens": "09820001a3d00013",
|
1190 |
-
"hash_cont_tokens": "0b47934fb6314dec"
|
1191 |
-
},
|
1192 |
-
"truncated": 0,
|
1193 |
-
"non_truncated": 311,
|
1194 |
-
"padded": 1244,
|
1195 |
-
"non_padded": 0,
|
1196 |
-
"effective_few_shots": 5.0,
|
1197 |
-
"num_truncated_few_shots": 0
|
1198 |
-
},
|
1199 |
-
"harness|hendrycksTest-prehistory|5": {
|
1200 |
-
"hashes": {
|
1201 |
-
"hash_examples": "8be90d0f538f1560",
|
1202 |
-
"hash_full_prompts": "152187949bcd0921",
|
1203 |
-
"hash_input_tokens": "7c4ec364ce2768c7",
|
1204 |
-
"hash_cont_tokens": "3f20acd855ee0a29"
|
1205 |
-
},
|
1206 |
-
"truncated": 0,
|
1207 |
-
"non_truncated": 324,
|
1208 |
-
"padded": 1296,
|
1209 |
-
"non_padded": 0,
|
1210 |
-
"effective_few_shots": 5.0,
|
1211 |
-
"num_truncated_few_shots": 0
|
1212 |
-
},
|
1213 |
-
"harness|hendrycksTest-professional_accounting|5": {
|
1214 |
-
"hashes": {
|
1215 |
-
"hash_examples": "8d377597916cd07e",
|
1216 |
-
"hash_full_prompts": "0eb7345d6144ee0d",
|
1217 |
-
"hash_input_tokens": "ced0534574d0ae3f",
|
1218 |
-
"hash_cont_tokens": "8f122ba881355d4b"
|
1219 |
-
},
|
1220 |
-
"truncated": 0,
|
1221 |
-
"non_truncated": 282,
|
1222 |
-
"padded": 1128,
|
1223 |
-
"non_padded": 0,
|
1224 |
-
"effective_few_shots": 5.0,
|
1225 |
-
"num_truncated_few_shots": 0
|
1226 |
-
},
|
1227 |
-
"harness|hendrycksTest-professional_law|5": {
|
1228 |
-
"hashes": {
|
1229 |
-
"hash_examples": "cd9dbc52b3c932d6",
|
1230 |
-
"hash_full_prompts": "36ac764272bfb182",
|
1231 |
-
"hash_input_tokens": "bcbdbbde22ec73e3",
|
1232 |
-
"hash_cont_tokens": "90d5df417c4d3fd3"
|
1233 |
-
},
|
1234 |
-
"truncated": 0,
|
1235 |
-
"non_truncated": 1534,
|
1236 |
-
"padded": 6136,
|
1237 |
-
"non_padded": 0,
|
1238 |
-
"effective_few_shots": 5.0,
|
1239 |
-
"num_truncated_few_shots": 0
|
1240 |
-
},
|
1241 |
-
"harness|hendrycksTest-professional_medicine|5": {
|
1242 |
-
"hashes": {
|
1243 |
-
"hash_examples": "b20e4e816c1e383e",
|
1244 |
-
"hash_full_prompts": "7b8d69ea2acaf2f7",
|
1245 |
-
"hash_input_tokens": "c54d753563114d45",
|
1246 |
-
"hash_cont_tokens": "4a2d2988884f7f70"
|
1247 |
-
},
|
1248 |
-
"truncated": 0,
|
1249 |
-
"non_truncated": 272,
|
1250 |
-
"padded": 1088,
|
1251 |
-
"non_padded": 0,
|
1252 |
-
"effective_few_shots": 5.0,
|
1253 |
-
"num_truncated_few_shots": 0
|
1254 |
-
},
|
1255 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
1256 |
-
"hashes": {
|
1257 |
-
"hash_examples": "d45b73b22f9cc039",
|
1258 |
-
"hash_full_prompts": "fe8937e9ffc99771",
|
1259 |
-
"hash_input_tokens": "b75dc55c0e32fa52",
|
1260 |
-
"hash_cont_tokens": "e0a952cb8a9c81de"
|
1261 |
-
},
|
1262 |
-
"truncated": 0,
|
1263 |
-
"non_truncated": 612,
|
1264 |
-
"padded": 2448,
|
1265 |
-
"non_padded": 0,
|
1266 |
-
"effective_few_shots": 5.0,
|
1267 |
-
"num_truncated_few_shots": 0
|
1268 |
-
},
|
1269 |
-
"harness|hendrycksTest-public_relations|5": {
|
1270 |
-
"hashes": {
|
1271 |
-
"hash_examples": "0d25072e1761652a",
|
1272 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
1273 |
-
"hash_input_tokens": "5ccdc8ec8db99622",
|
1274 |
-
"hash_cont_tokens": "1fa77a8dff3922b8"
|
1275 |
-
},
|
1276 |
-
"truncated": 0,
|
1277 |
-
"non_truncated": 110,
|
1278 |
-
"padded": 440,
|
1279 |
-
"non_padded": 0,
|
1280 |
-
"effective_few_shots": 5.0,
|
1281 |
-
"num_truncated_few_shots": 0
|
1282 |
-
},
|
1283 |
-
"harness|hendrycksTest-security_studies|5": {
|
1284 |
-
"hashes": {
|
1285 |
-
"hash_examples": "62bb8197e63d60d4",
|
1286 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
1287 |
-
"hash_input_tokens": "ca8497342e5b1d57",
|
1288 |
-
"hash_cont_tokens": "81fc9cb3cbdd52db"
|
1289 |
-
},
|
1290 |
-
"truncated": 0,
|
1291 |
-
"non_truncated": 245,
|
1292 |
-
"padded": 980,
|
1293 |
-
"non_padded": 0,
|
1294 |
-
"effective_few_shots": 5.0,
|
1295 |
-
"num_truncated_few_shots": 0
|
1296 |
-
},
|
1297 |
-
"harness|hendrycksTest-sociology|5": {
|
1298 |
-
"hashes": {
|
1299 |
-
"hash_examples": "e7959df87dea8672",
|
1300 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
1301 |
-
"hash_input_tokens": "069c76424fbd3dab",
|
1302 |
-
"hash_cont_tokens": "2a0493252ed2cf43"
|
1303 |
-
},
|
1304 |
-
"truncated": 0,
|
1305 |
-
"non_truncated": 201,
|
1306 |
-
"padded": 804,
|
1307 |
-
"non_padded": 0,
|
1308 |
-
"effective_few_shots": 5.0,
|
1309 |
-
"num_truncated_few_shots": 0
|
1310 |
-
},
|
1311 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
1312 |
-
"hashes": {
|
1313 |
-
"hash_examples": "4a56a01ddca44dca",
|
1314 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
1315 |
-
"hash_input_tokens": "a7e393a626169576",
|
1316 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1317 |
-
},
|
1318 |
-
"truncated": 0,
|
1319 |
-
"non_truncated": 100,
|
1320 |
-
"padded": 400,
|
1321 |
-
"non_padded": 0,
|
1322 |
-
"effective_few_shots": 5.0,
|
1323 |
-
"num_truncated_few_shots": 0
|
1324 |
-
},
|
1325 |
-
"harness|hendrycksTest-virology|5": {
|
1326 |
-
"hashes": {
|
1327 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
1328 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
1329 |
-
"hash_input_tokens": "bf99dc973e3a650d",
|
1330 |
-
"hash_cont_tokens": "5ab892d003b00c98"
|
1331 |
-
},
|
1332 |
-
"truncated": 0,
|
1333 |
-
"non_truncated": 166,
|
1334 |
-
"padded": 664,
|
1335 |
-
"non_padded": 0,
|
1336 |
-
"effective_few_shots": 5.0,
|
1337 |
-
"num_truncated_few_shots": 0
|
1338 |
-
},
|
1339 |
-
"harness|hendrycksTest-world_religions|5": {
|
1340 |
-
"hashes": {
|
1341 |
-
"hash_examples": "3b29cfaf1a81c379",
|
1342 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
1343 |
-
"hash_input_tokens": "1761cfaf21797065",
|
1344 |
-
"hash_cont_tokens": "15a5e5dbdfbb8568"
|
1345 |
-
},
|
1346 |
-
"truncated": 0,
|
1347 |
-
"non_truncated": 171,
|
1348 |
-
"padded": 684,
|
1349 |
-
"non_padded": 0,
|
1350 |
-
"effective_few_shots": 5.0,
|
1351 |
-
"num_truncated_few_shots": 0
|
1352 |
-
},
|
1353 |
-
"harness|truthfulqa:mc|0": {
|
1354 |
-
"hashes": {
|
1355 |
-
"hash_examples": "23176c0531c7b867",
|
1356 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
1357 |
-
"hash_input_tokens": "298b43914bbdf4ca",
|
1358 |
-
"hash_cont_tokens": "5a8d4bb398b1c3c0"
|
1359 |
-
},
|
1360 |
-
"truncated": 0,
|
1361 |
-
"non_truncated": 817,
|
1362 |
-
"padded": 9996,
|
1363 |
-
"non_padded": 0,
|
1364 |
-
"effective_few_shots": 0.0,
|
1365 |
-
"num_truncated_few_shots": 0
|
1366 |
-
},
|
1367 |
-
"harness|winogrande|5": {
|
1368 |
-
"hashes": {
|
1369 |
-
"hash_examples": "aada0a176fd81218",
|
1370 |
-
"hash_full_prompts": "c8655cbd12de8409",
|
1371 |
-
"hash_input_tokens": "31aa3477d959f771",
|
1372 |
-
"hash_cont_tokens": "618558fb93c0f288"
|
1373 |
-
},
|
1374 |
-
"truncated": 0,
|
1375 |
-
"non_truncated": 1267,
|
1376 |
-
"padded": 2534,
|
1377 |
-
"non_padded": 0,
|
1378 |
-
"effective_few_shots": 5.0,
|
1379 |
-
"num_truncated_few_shots": 0
|
1380 |
-
},
|
1381 |
-
"harness|gsm8k|5": {
|
1382 |
-
"hashes": {
|
1383 |
-
"hash_examples": "4c0843a5d99bcfdc",
|
1384 |
-
"hash_full_prompts": "41d55e83abc0e02d",
|
1385 |
-
"hash_input_tokens": "6af0ae8cfe684f50",
|
1386 |
-
"hash_cont_tokens": "01374f708aeddf82"
|
1387 |
-
},
|
1388 |
-
"truncated": 0,
|
1389 |
-
"non_truncated": 1319,
|
1390 |
-
"padded": 0,
|
1391 |
-
"non_padded": 1319,
|
1392 |
-
"effective_few_shots": 5.0,
|
1393 |
-
"num_truncated_few_shots": 0
|
1394 |
-
}
|
1395 |
-
},
|
1396 |
-
"summary_general": {
|
1397 |
-
"hashes": {
|
1398 |
-
"hash_examples": "3b7fa57a057f9415",
|
1399 |
-
"hash_full_prompts": "63615fc50fc9417c",
|
1400 |
-
"hash_input_tokens": "9c04e828ae29cacc",
|
1401 |
-
"hash_cont_tokens": "4b585fc841a0e774"
|
1402 |
-
},
|
1403 |
-
"truncated": 0,
|
1404 |
-
"non_truncated": 28659,
|
1405 |
-
"padded": 113460,
|
1406 |
-
"non_padded": 1412,
|
1407 |
-
"num_truncated_few_shots": 0
|
1408 |
-
}
|
1409 |
-
}
|
upstage/SOLAR-10.7B-v1.0/results_2023-12-13T16-09-54.285787.json
DELETED
@@ -1,1409 +0,0 @@
{
    "config_general": {
        "lighteval_sha": "0e4607eff593f6f842aeaa0e5fa6760f58b9d1e9",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 101672.357715157,
        "end_time": 114747.99228277,
        "total_evaluation_time_secondes": "13075.634567613",
        "model_name": "upstage/SOLAR-10.7B-v1.0",
        "model_sha": "6e2783822f35c376ea96852fe479faa6a8bf09cb",
        "model_dtype": "torch.float16",
        "model_size": "20.08 GB"
    },
    "results": {
        "harness|arc:challenge|25": {
            "acc": 0.5870307167235495,
            "acc_stderr": 0.014388344935398324,
            "acc_norm": 0.6194539249146758,
            "acc_norm_stderr": 0.014188277712349812
        },
        "harness|hellaswag|10": {
            "acc": 0.6542521410077674,
            "acc_stderr": 0.0047463946133845395,
            "acc_norm": 0.8460466042620992,
            "acc_norm_stderr": 0.0036016648387189156
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|hendrycksTest-anatomy|5": {
            "acc": 0.6,
            "acc_stderr": 0.04232073695151589,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04232073695151589
        },
        "harness|hendrycksTest-astronomy|5": {
            "acc": 0.75,
            "acc_stderr": 0.03523807393012047,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.03523807393012047
        },
        "harness|hendrycksTest-business_ethics|5": {
            "acc": 0.67,
            "acc_stderr": 0.04725815626252607,
            "acc_norm": 0.67,
            "acc_norm_stderr": 0.04725815626252607
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "acc": 0.7094339622641509,
            "acc_stderr": 0.027943219989337142,
            "acc_norm": 0.7094339622641509,
            "acc_norm_stderr": 0.027943219989337142
        },
        "harness|hendrycksTest-college_biology|5": {
            "acc": 0.7638888888888888,
            "acc_stderr": 0.03551446610810826,
            "acc_norm": 0.7638888888888888,
            "acc_norm_stderr": 0.03551446610810826
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|hendrycksTest-college_medicine|5": {
            "acc": 0.6705202312138728,
            "acc_stderr": 0.03583901754736412,
            "acc_norm": 0.6705202312138728,
            "acc_norm_stderr": 0.03583901754736412
        },
        "harness|hendrycksTest-college_physics|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.04755129616062946,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.04755129616062946
        },
        "harness|hendrycksTest-computer_security|5": {
            "acc": 0.74,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "acc": 0.5829787234042553,
            "acc_stderr": 0.03223276266711712,
            "acc_norm": 0.5829787234042553,
            "acc_norm_stderr": 0.03223276266711712
        },
        "harness|hendrycksTest-econometrics|5": {
            "acc": 0.5,
            "acc_stderr": 0.047036043419179864,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.047036043419179864
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "acc": 0.6068965517241379,
            "acc_stderr": 0.0407032901370707,
            "acc_norm": 0.6068965517241379,
            "acc_norm_stderr": 0.0407032901370707
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "acc": 0.42592592592592593,
            "acc_stderr": 0.025467149045469536,
            "acc_norm": 0.42592592592592593,
            "acc_norm_stderr": 0.025467149045469536
        },
        "harness|hendrycksTest-formal_logic|5": {
            "acc": 0.4365079365079365,
            "acc_stderr": 0.04435932892851466,
            "acc_norm": 0.4365079365079365,
            "acc_norm_stderr": 0.04435932892851466
        },
        "harness|hendrycksTest-global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "acc": 0.7967741935483871,
            "acc_stderr": 0.022891687984554963,
            "acc_norm": 0.7967741935483871,
            "acc_norm_stderr": 0.022891687984554963
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "acc": 0.4827586206896552,
            "acc_stderr": 0.035158955511656986,
            "acc_norm": 0.4827586206896552,
            "acc_norm_stderr": 0.035158955511656986
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "acc": 0.65,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "acc": 0.7878787878787878,
            "acc_stderr": 0.031922715695482995,
            "acc_norm": 0.7878787878787878,
            "acc_norm_stderr": 0.031922715695482995
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "acc": 0.8434343434343434,
            "acc_stderr": 0.025890520358141454,
            "acc_norm": 0.8434343434343434,
            "acc_norm_stderr": 0.025890520358141454
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "acc": 0.9222797927461139,
            "acc_stderr": 0.019321805557223154,
            "acc_norm": 0.9222797927461139,
            "acc_norm_stderr": 0.019321805557223154
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "acc": 0.6564102564102564,
            "acc_stderr": 0.024078696580635474,
            "acc_norm": 0.6564102564102564,
            "acc_norm_stderr": 0.024078696580635474
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "acc": 0.37777777777777777,
            "acc_stderr": 0.029560707392465715,
            "acc_norm": 0.37777777777777777,
            "acc_norm_stderr": 0.029560707392465715
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "acc": 0.6890756302521008,
            "acc_stderr": 0.030066761582977934,
            "acc_norm": 0.6890756302521008,
            "acc_norm_stderr": 0.030066761582977934
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "acc": 0.3509933774834437,
            "acc_stderr": 0.03896981964257375,
            "acc_norm": 0.3509933774834437,
            "acc_norm_stderr": 0.03896981964257375
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "acc": 0.8422018348623853,
            "acc_stderr": 0.015630022970092434,
            "acc_norm": 0.8422018348623853,
            "acc_norm_stderr": 0.015630022970092434
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.033247089118091176,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.033247089118091176
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "acc": 0.8529411764705882,
            "acc_stderr": 0.024857478080250454,
            "acc_norm": 0.8529411764705882,
            "acc_norm_stderr": 0.024857478080250454
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "acc": 0.8270042194092827,
            "acc_stderr": 0.02462156286676842,
            "acc_norm": 0.8270042194092827,
            "acc_norm_stderr": 0.02462156286676842
        },
        "harness|hendrycksTest-human_aging|5": {
            "acc": 0.7130044843049327,
            "acc_stderr": 0.030360379710291947,
            "acc_norm": 0.7130044843049327,
            "acc_norm_stderr": 0.030360379710291947
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "acc": 0.7557251908396947,
            "acc_stderr": 0.03768335959728744,
            "acc_norm": 0.7557251908396947,
            "acc_norm_stderr": 0.03768335959728744
        },
        "harness|hendrycksTest-international_law|5": {
            "acc": 0.8016528925619835,
            "acc_stderr": 0.036401182719909456,
            "acc_norm": 0.8016528925619835,
            "acc_norm_stderr": 0.036401182719909456
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "acc": 0.7777777777777778,
            "acc_stderr": 0.040191074725573483,
            "acc_norm": 0.7777777777777778,
            "acc_norm_stderr": 0.040191074725573483
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "acc": 0.7668711656441718,
            "acc_stderr": 0.0332201579577674,
            "acc_norm": 0.7668711656441718,
            "acc_norm_stderr": 0.0332201579577674
        },
        "harness|hendrycksTest-machine_learning|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.04697113923010212,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.04697113923010212
        },
        "harness|hendrycksTest-management|5": {
            "acc": 0.8155339805825242,
            "acc_stderr": 0.03840423627288276,
            "acc_norm": 0.8155339805825242,
            "acc_norm_stderr": 0.03840423627288276
        },
        "harness|hendrycksTest-marketing|5": {
            "acc": 0.8760683760683761,
            "acc_stderr": 0.021586494001281382,
            "acc_norm": 0.8760683760683761,
            "acc_norm_stderr": 0.021586494001281382
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "acc": 0.75,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "acc": 0.8275862068965517,
            "acc_stderr": 0.013507943909371803,
            "acc_norm": 0.8275862068965517,
            "acc_norm_stderr": 0.013507943909371803
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "acc": 0.7254335260115607,
            "acc_stderr": 0.024027745155265023,
            "acc_norm": 0.7254335260115607,
            "acc_norm_stderr": 0.024027745155265023
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "acc": 0.25027932960893856,
            "acc_stderr": 0.01448750085285042,
            "acc_norm": 0.25027932960893856,
            "acc_norm_stderr": 0.01448750085285042
        },
        "harness|hendrycksTest-nutrition|5": {
            "acc": 0.7875816993464052,
            "acc_stderr": 0.02342037547829613,
            "acc_norm": 0.7875816993464052,
            "acc_norm_stderr": 0.02342037547829613
        },
        "harness|hendrycksTest-philosophy|5": {
            "acc": 0.6945337620578779,
            "acc_stderr": 0.02616058445014045,
            "acc_norm": 0.6945337620578779,
            "acc_norm_stderr": 0.02616058445014045
        },
        "harness|hendrycksTest-prehistory|5": {
            "acc": 0.7654320987654321,
            "acc_stderr": 0.023576881744005716,
            "acc_norm": 0.7654320987654321,
            "acc_norm_stderr": 0.023576881744005716
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "acc": 0.48226950354609927,
            "acc_stderr": 0.02980873964223777,
            "acc_norm": 0.48226950354609927,
            "acc_norm_stderr": 0.02980873964223777
        },
        "harness|hendrycksTest-professional_law|5": {
            "acc": 0.49608865710560623,
            "acc_stderr": 0.012769845366441192,
            "acc_norm": 0.49608865710560623,
            "acc_norm_stderr": 0.012769845366441192
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "acc": 0.7352941176470589,
            "acc_stderr": 0.026799562024887664,
            "acc_norm": 0.7352941176470589,
            "acc_norm_stderr": 0.026799562024887664
        },
        "harness|hendrycksTest-professional_psychology|5": {
            "acc": 0.7058823529411765,
            "acc_stderr": 0.018433427649401903,
            "acc_norm": 0.7058823529411765,
            "acc_norm_stderr": 0.018433427649401903
        },
        "harness|hendrycksTest-public_relations|5": {
            "acc": 0.7,
            "acc_stderr": 0.04389311454644286,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.04389311454644286
        },
        "harness|hendrycksTest-security_studies|5": {
            "acc": 0.7714285714285715,
            "acc_stderr": 0.026882144922307744,
            "acc_norm": 0.7714285714285715,
            "acc_norm_stderr": 0.026882144922307744
        },
        "harness|hendrycksTest-sociology|5": {
            "acc": 0.8756218905472637,
            "acc_stderr": 0.023335401790166327,
            "acc_norm": 0.8756218905472637,
            "acc_norm_stderr": 0.023335401790166327
        },
        "harness|hendrycksTest-us_foreign_policy|5": {
            "acc": 0.89,
            "acc_stderr": 0.03144660377352203,
            "acc_norm": 0.89,
            "acc_norm_stderr": 0.03144660377352203
        },
        "harness|hendrycksTest-virology|5": {
            "acc": 0.5301204819277109,
            "acc_stderr": 0.03885425420866767,
            "acc_norm": 0.5301204819277109,
            "acc_norm_stderr": 0.03885425420866767
        },
        "harness|hendrycksTest-world_religions|5": {
            "acc": 0.8304093567251462,
            "acc_stderr": 0.028782108105401705,
            "acc_norm": 0.8304093567251462,
            "acc_norm_stderr": 0.028782108105401705
        },
        "harness|truthfulqa:mc|0": {
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.016114124156882455,
            "mc2": 0.4503940613910581,
            "mc2_stderr": 0.014223702771748893
        },
        "harness|winogrande|5": {
            "acc": 0.8366219415943172,
            "acc_stderr": 0.010390695970273764
        },
        "harness|gsm8k|5": {
            "acc": 0.5549658832448825,
            "acc_stderr": 0.013689011567414202
        },
        "all": {
            "acc": 0.6550126565893573,
            "acc_stderr": 0.031671818397293244,
            "acc_norm": 0.6574305406535094,
            "acc_norm_stderr": 0.03231451555422857,
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.016114124156882455,
            "mc2": 0.4503940613910581,
            "mc2_stderr": 0.014223702771748893
        }
    },
    "versions": {
        "all": 0,
        "harness|arc:challenge|25": 0,
        "harness|gsm8k|5": 0,
        "harness|hellaswag|10": 0,
        "harness|hendrycksTest-abstract_algebra|5": 1,
        "harness|hendrycksTest-anatomy|5": 1,
        "harness|hendrycksTest-astronomy|5": 1,
        "harness|hendrycksTest-business_ethics|5": 1,
        "harness|hendrycksTest-clinical_knowledge|5": 1,
        "harness|hendrycksTest-college_biology|5": 1,
        "harness|hendrycksTest-college_chemistry|5": 1,
        "harness|hendrycksTest-college_computer_science|5": 1,
        "harness|hendrycksTest-college_mathematics|5": 1,
        "harness|hendrycksTest-college_medicine|5": 1,
        "harness|hendrycksTest-college_physics|5": 1,
        "harness|hendrycksTest-computer_security|5": 1,
        "harness|hendrycksTest-conceptual_physics|5": 1,
        "harness|hendrycksTest-econometrics|5": 1,
        "harness|hendrycksTest-electrical_engineering|5": 1,
        "harness|hendrycksTest-elementary_mathematics|5": 1,
        "harness|hendrycksTest-formal_logic|5": 1,
        "harness|hendrycksTest-global_facts|5": 1,
        "harness|hendrycksTest-high_school_biology|5": 1,
        "harness|hendrycksTest-high_school_chemistry|5": 1,
        "harness|hendrycksTest-high_school_computer_science|5": 1,
        "harness|hendrycksTest-high_school_european_history|5": 1,
        "harness|hendrycksTest-high_school_geography|5": 1,
        "harness|hendrycksTest-high_school_government_and_politics|5": 1,
        "harness|hendrycksTest-high_school_macroeconomics|5": 1,
        "harness|hendrycksTest-high_school_mathematics|5": 1,
        "harness|hendrycksTest-high_school_microeconomics|5": 1,
        "harness|hendrycksTest-high_school_physics|5": 1,
        "harness|hendrycksTest-high_school_psychology|5": 1,
        "harness|hendrycksTest-high_school_statistics|5": 1,
        "harness|hendrycksTest-high_school_us_history|5": 1,
        "harness|hendrycksTest-high_school_world_history|5": 1,
        "harness|hendrycksTest-human_aging|5": 1,
        "harness|hendrycksTest-human_sexuality|5": 1,
        "harness|hendrycksTest-international_law|5": 1,
        "harness|hendrycksTest-jurisprudence|5": 1,
        "harness|hendrycksTest-logical_fallacies|5": 1,
        "harness|hendrycksTest-machine_learning|5": 1,
        "harness|hendrycksTest-management|5": 1,
        "harness|hendrycksTest-marketing|5": 1,
        "harness|hendrycksTest-medical_genetics|5": 1,
        "harness|hendrycksTest-miscellaneous|5": 1,
        "harness|hendrycksTest-moral_disputes|5": 1,
        "harness|hendrycksTest-moral_scenarios|5": 1,
        "harness|hendrycksTest-nutrition|5": 1,
        "harness|hendrycksTest-philosophy|5": 1,
        "harness|hendrycksTest-prehistory|5": 1,
        "harness|hendrycksTest-professional_accounting|5": 1,
        "harness|hendrycksTest-professional_law|5": 1,
        "harness|hendrycksTest-professional_medicine|5": 1,
        "harness|hendrycksTest-professional_psychology|5": 1,
        "harness|hendrycksTest-public_relations|5": 1,
        "harness|hendrycksTest-security_studies|5": 1,
        "harness|hendrycksTest-sociology|5": 1,
        "harness|hendrycksTest-us_foreign_policy|5": 1,
        "harness|hendrycksTest-virology|5": 1,
        "harness|hendrycksTest-world_religions|5": 1,
        "harness|truthfulqa:mc|0": 1,
        "harness|winogrande|5": 0
    },
    "config_tasks": {
        "harness|arc:challenge": "LM Harness task",
        "harness|gsm8k": "LM Harness task",
        "harness|hellaswag": "LM Harness task",
        "harness|hendrycksTest-abstract_algebra": "LM Harness task",
        "harness|hendrycksTest-anatomy": "LM Harness task",
        "harness|hendrycksTest-astronomy": "LM Harness task",
        "harness|hendrycksTest-business_ethics": "LM Harness task",
        "harness|hendrycksTest-clinical_knowledge": "LM Harness task",
        "harness|hendrycksTest-college_biology": "LM Harness task",
        "harness|hendrycksTest-college_chemistry": "LM Harness task",
        "harness|hendrycksTest-college_computer_science": "LM Harness task",
        "harness|hendrycksTest-college_mathematics": "LM Harness task",
        "harness|hendrycksTest-college_medicine": "LM Harness task",
        "harness|hendrycksTest-college_physics": "LM Harness task",
        "harness|hendrycksTest-computer_security": "LM Harness task",
        "harness|hendrycksTest-conceptual_physics": "LM Harness task",
        "harness|hendrycksTest-econometrics": "LM Harness task",
        "harness|hendrycksTest-electrical_engineering": "LM Harness task",
        "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
        "harness|hendrycksTest-formal_logic": "LM Harness task",
        "harness|hendrycksTest-global_facts": "LM Harness task",
        "harness|hendrycksTest-high_school_biology": "LM Harness task",
        "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
        "harness|hendrycksTest-high_school_computer_science": "LM Harness task",
        "harness|hendrycksTest-high_school_european_history": "LM Harness task",
        "harness|hendrycksTest-high_school_geography": "LM Harness task",
        "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
        "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
        "harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_physics": "LM Harness task",
        "harness|hendrycksTest-high_school_psychology": "LM Harness task",
        "harness|hendrycksTest-high_school_statistics": "LM Harness task",
        "harness|hendrycksTest-high_school_us_history": "LM Harness task",
        "harness|hendrycksTest-high_school_world_history": "LM Harness task",
        "harness|hendrycksTest-human_aging": "LM Harness task",
        "harness|hendrycksTest-human_sexuality": "LM Harness task",
        "harness|hendrycksTest-international_law": "LM Harness task",
        "harness|hendrycksTest-jurisprudence": "LM Harness task",
        "harness|hendrycksTest-logical_fallacies": "LM Harness task",
        "harness|hendrycksTest-machine_learning": "LM Harness task",
        "harness|hendrycksTest-management": "LM Harness task",
        "harness|hendrycksTest-marketing": "LM Harness task",
        "harness|hendrycksTest-medical_genetics": "LM Harness task",
        "harness|hendrycksTest-miscellaneous": "LM Harness task",
        "harness|hendrycksTest-moral_disputes": "LM Harness task",
        "harness|hendrycksTest-moral_scenarios": "LM Harness task",
        "harness|hendrycksTest-nutrition": "LM Harness task",
        "harness|hendrycksTest-philosophy": "LM Harness task",
        "harness|hendrycksTest-prehistory": "LM Harness task",
        "harness|hendrycksTest-professional_accounting": "LM Harness task",
        "harness|hendrycksTest-professional_law": "LM Harness task",
        "harness|hendrycksTest-professional_medicine": "LM Harness task",
        "harness|hendrycksTest-professional_psychology": "LM Harness task",
        "harness|hendrycksTest-public_relations": "LM Harness task",
        "harness|hendrycksTest-security_studies": "LM Harness task",
        "harness|hendrycksTest-sociology": "LM Harness task",
        "harness|hendrycksTest-us_foreign_policy": "LM Harness task",
        "harness|hendrycksTest-virology": "LM Harness task",
        "harness|hendrycksTest-world_religions": "LM Harness task",
        "harness|truthfulqa:mc": "LM Harness task",
        "harness|winogrande": "LM Harness task"
    },
    "summary_tasks": {
        "harness|arc:challenge|25": {
            "hashes": {
                "hash_examples": "17b0cae357c0259e",
                "hash_full_prompts": "045cbb916e5145c6",
                "hash_input_tokens": "9bcd0d1d37471713",
                "hash_cont_tokens": "289aa98c400841d8"
            },
            "truncated": 0,
            "non_truncated": 1172,
            "padded": 4670,
            "non_padded": 17,
            "effective_few_shots": 25.0,
            "num_truncated_few_shots": 0
        },
        "harness|hellaswag|10": {
            "hashes": {
                "hash_examples": "e1768ecb99d7ecf0",
                "hash_full_prompts": "0b4c16983130f84f",
                "hash_input_tokens": "80b8c6d79740318e",
                "hash_cont_tokens": "ac460260c3e6efc9"
            },
            "truncated": 0,
            "non_truncated": 10042,
            "padded": 40101,
            "non_padded": 67,
            "effective_few_shots": 10.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "hashes": {
                "hash_examples": "280f9f325b40559a",
                "hash_full_prompts": "2f776a367d23aea2",
                "hash_input_tokens": "b813d36287c6556c",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-anatomy|5": {
            "hashes": {
                "hash_examples": "2f83a4f1cab4ba18",
                "hash_full_prompts": "516f74bef25df620",
                "hash_input_tokens": "09dc2380497f7a47",
                "hash_cont_tokens": "a52a4f60d98cbe5c"
            },
            "truncated": 0,
            "non_truncated": 135,
            "padded": 540,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-astronomy|5": {
            "hashes": {
                "hash_examples": "7d587b908da4d762",
                "hash_full_prompts": "faf4e80f65de93ca",
                "hash_input_tokens": "68ca3220b0fdd1f3",
                "hash_cont_tokens": "10f7d8eeba97841d"
            },
            "truncated": 0,
            "non_truncated": 152,
            "padded": 608,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-business_ethics|5": {
            "hashes": {
                "hash_examples": "33e51740670de686",
                "hash_full_prompts": "db01c3ef8e1479d4",
                "hash_input_tokens": "bd14ef1320de241e",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "hashes": {
                "hash_examples": "f3366dbe7eefffa4",
                "hash_full_prompts": "49654f71d94b65c3",
                "hash_input_tokens": "d96186ab98017c43",
                "hash_cont_tokens": "edef9975ba9165b5"
            },
            "truncated": 0,
            "non_truncated": 265,
            "padded": 1060,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_biology|5": {
            "hashes": {
                "hash_examples": "ca2b6753a0193e7f",
                "hash_full_prompts": "2b460b75f1fdfefd",
                "hash_input_tokens": "424136b34e95b200",
                "hash_cont_tokens": "0aa103ec6602280b"
            },
            "truncated": 0,
            "non_truncated": 144,
            "padded": 576,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "hashes": {
                "hash_examples": "22ff85f1d34f42d1",
                "hash_full_prompts": "242c9be6da583e95",
                "hash_input_tokens": "8dd8b80e336bbe54",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "hashes": {
                "hash_examples": "30318289d717a5cf",
                "hash_full_prompts": "ed2bdb4e87c4b371",
                "hash_input_tokens": "145d4cef8ca2261d",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "hashes": {
                "hash_examples": "4944d1f0b6b5d911",
                "hash_full_prompts": "770bc4281c973190",
                "hash_input_tokens": "561995d32d2b25c4",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_medicine|5": {
            "hashes": {
                "hash_examples": "dd69cc33381275af",
                "hash_full_prompts": "ad2a53e5250ab46e",
                "hash_input_tokens": "6a258a9d4418599c",
                "hash_cont_tokens": "1979021dbc698754"
            },
            "truncated": 0,
            "non_truncated": 173,
            "padded": 692,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_physics|5": {
            "hashes": {
                "hash_examples": "875dd26d22655b0d",
                "hash_full_prompts": "833a0d7b55aed500",
                "hash_input_tokens": "fa5e0d5b5f97b66a",
                "hash_cont_tokens": "7cf7fe2bab00acbd"
            },
            "truncated": 0,
            "non_truncated": 102,
            "padded": 408,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-computer_security|5": {
            "hashes": {
                "hash_examples": "006451eedc0ededb",
                "hash_full_prompts": "94034c97e85d8f46",
                "hash_input_tokens": "07d27397edfae492",
                "hash_cont_tokens": "17b868b63507f9a3"
            },
            "truncated": 0,
            "non_truncated": 100,
            "padded": 400,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "hashes": {
                "hash_examples": "8874ece872d2ca4c",
                "hash_full_prompts": "e40d15a34640d6fa",
                "hash_input_tokens": "da5e6c3c8eb17da6",
                "hash_cont_tokens": "903f64eed2b0d217"
            },
            "truncated": 0,
            "non_truncated": 235,
            "padded": 940,
            "non_padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-econometrics|5": {
            "hashes": {
                "hash_examples": "64d3623b0bfaa43f",
                "hash_full_prompts": "612f340fae41338d",
                "hash_input_tokens": "f6ba8e358bdb523e",
                "hash_cont_tokens": "721ae6c5302c4bf2"
            },
            "truncated": 0,
            "non_truncated": 114,
            "padded": 456,
            "non_padded": 0,
            "effective_few_shots": 5.0,
"num_truncated_few_shots": 0
|
750 |
-
},
|
751 |
-
"harness|hendrycksTest-electrical_engineering|5": {
|
752 |
-
"hashes": {
|
753 |
-
"hash_examples": "e98f51780c674d7e",
|
754 |
-
"hash_full_prompts": "10275b312d812ae6",
|
755 |
-
"hash_input_tokens": "b2459da4c5ca8590",
|
756 |
-
"hash_cont_tokens": "15a738960ed3e587"
|
757 |
-
},
|
758 |
-
"truncated": 0,
|
759 |
-
"non_truncated": 145,
|
760 |
-
"padded": 575,
|
761 |
-
"non_padded": 5,
|
762 |
-
"effective_few_shots": 5.0,
|
763 |
-
"num_truncated_few_shots": 0
|
764 |
-
},
|
765 |
-
"harness|hendrycksTest-elementary_mathematics|5": {
|
766 |
-
"hashes": {
|
767 |
-
"hash_examples": "fc48208a5ac1c0ce",
|
768 |
-
"hash_full_prompts": "5ec274c6c82aca23",
|
769 |
-
"hash_input_tokens": "0b969d9ad706a13a",
|
770 |
-
"hash_cont_tokens": "c96470462fc71683"
|
771 |
-
},
|
772 |
-
"truncated": 0,
|
773 |
-
"non_truncated": 378,
|
774 |
-
"padded": 1512,
|
775 |
-
"non_padded": 0,
|
776 |
-
"effective_few_shots": 5.0,
|
777 |
-
"num_truncated_few_shots": 0
|
778 |
-
},
|
779 |
-
"harness|hendrycksTest-formal_logic|5": {
|
780 |
-
"hashes": {
|
781 |
-
"hash_examples": "5a6525665f63ea72",
|
782 |
-
"hash_full_prompts": "07b92638c4a6b500",
|
783 |
-
"hash_input_tokens": "02bc3eb5f90da86e",
|
784 |
-
"hash_cont_tokens": "0e1ce025c9d6ee7e"
|
785 |
-
},
|
786 |
-
"truncated": 0,
|
787 |
-
"non_truncated": 126,
|
788 |
-
"padded": 504,
|
789 |
-
"non_padded": 0,
|
790 |
-
"effective_few_shots": 5.0,
|
791 |
-
"num_truncated_few_shots": 0
|
792 |
-
},
|
793 |
-
"harness|hendrycksTest-global_facts|5": {
|
794 |
-
"hashes": {
|
795 |
-
"hash_examples": "371d70d743b2b89b",
|
796 |
-
"hash_full_prompts": "332fdee50a1921b4",
|
797 |
-
"hash_input_tokens": "3d5106918bcbeb43",
|
798 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
799 |
-
},
|
800 |
-
"truncated": 0,
|
801 |
-
"non_truncated": 100,
|
802 |
-
"padded": 400,
|
803 |
-
"non_padded": 0,
|
804 |
-
"effective_few_shots": 5.0,
|
805 |
-
"num_truncated_few_shots": 0
|
806 |
-
},
|
807 |
-
"harness|hendrycksTest-high_school_biology|5": {
|
808 |
-
"hashes": {
|
809 |
-
"hash_examples": "a79e1018b1674052",
|
810 |
-
"hash_full_prompts": "e624e26ede922561",
|
811 |
-
"hash_input_tokens": "7b089392db2dabbd",
|
812 |
-
"hash_cont_tokens": "e34d57f7d3c4ca16"
|
813 |
-
},
|
814 |
-
"truncated": 0,
|
815 |
-
"non_truncated": 310,
|
816 |
-
"padded": 1240,
|
817 |
-
"non_padded": 0,
|
818 |
-
"effective_few_shots": 5.0,
|
819 |
-
"num_truncated_few_shots": 0
|
820 |
-
},
|
821 |
-
"harness|hendrycksTest-high_school_chemistry|5": {
|
822 |
-
"hashes": {
|
823 |
-
"hash_examples": "44bfc25c389f0e03",
|
824 |
-
"hash_full_prompts": "0e3e5f5d9246482a",
|
825 |
-
"hash_input_tokens": "ba90b2ffed1c067d",
|
826 |
-
"hash_cont_tokens": "e8482d44df4b3740"
|
827 |
-
},
|
828 |
-
"truncated": 0,
|
829 |
-
"non_truncated": 203,
|
830 |
-
"padded": 812,
|
831 |
-
"non_padded": 0,
|
832 |
-
"effective_few_shots": 5.0,
|
833 |
-
"num_truncated_few_shots": 0
|
834 |
-
},
|
835 |
-
"harness|hendrycksTest-high_school_computer_science|5": {
|
836 |
-
"hashes": {
|
837 |
-
"hash_examples": "8b8cdb1084f24169",
|
838 |
-
"hash_full_prompts": "c00487e67c1813cc",
|
839 |
-
"hash_input_tokens": "60eeec309ef0717f",
|
840 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
841 |
-
},
|
842 |
-
"truncated": 0,
|
843 |
-
"non_truncated": 100,
|
844 |
-
"padded": 400,
|
845 |
-
"non_padded": 0,
|
846 |
-
"effective_few_shots": 5.0,
|
847 |
-
"num_truncated_few_shots": 0
|
848 |
-
},
|
849 |
-
"harness|hendrycksTest-high_school_european_history|5": {
|
850 |
-
"hashes": {
|
851 |
-
"hash_examples": "11cd32d0ef440171",
|
852 |
-
"hash_full_prompts": "318f4513c537c6bf",
|
853 |
-
"hash_input_tokens": "5e5e8bf3808e0ead",
|
854 |
-
"hash_cont_tokens": "d63e679a49418339"
|
855 |
-
},
|
856 |
-
"truncated": 0,
|
857 |
-
"non_truncated": 165,
|
858 |
-
"padded": 656,
|
859 |
-
"non_padded": 4,
|
860 |
-
"effective_few_shots": 5.0,
|
861 |
-
"num_truncated_few_shots": 0
|
862 |
-
},
|
863 |
-
"harness|hendrycksTest-high_school_geography|5": {
|
864 |
-
"hashes": {
|
865 |
-
"hash_examples": "b60019b9e80b642f",
|
866 |
-
"hash_full_prompts": "ee5789fcc1a81b1e",
|
867 |
-
"hash_input_tokens": "4da9b741d4e7ea78",
|
868 |
-
"hash_cont_tokens": "d78483e286d06f1a"
|
869 |
-
},
|
870 |
-
"truncated": 0,
|
871 |
-
"non_truncated": 198,
|
872 |
-
"padded": 792,
|
873 |
-
"non_padded": 0,
|
874 |
-
"effective_few_shots": 5.0,
|
875 |
-
"num_truncated_few_shots": 0
|
876 |
-
},
|
877 |
-
"harness|hendrycksTest-high_school_government_and_politics|5": {
|
878 |
-
"hashes": {
|
879 |
-
"hash_examples": "d221ec983d143dc3",
|
880 |
-
"hash_full_prompts": "ac42d888e1ce1155",
|
881 |
-
"hash_input_tokens": "acb4bc872ac86ed7",
|
882 |
-
"hash_cont_tokens": "691cdff71ff5fe57"
|
883 |
-
},
|
884 |
-
"truncated": 0,
|
885 |
-
"non_truncated": 193,
|
886 |
-
"padded": 772,
|
887 |
-
"non_padded": 0,
|
888 |
-
"effective_few_shots": 5.0,
|
889 |
-
"num_truncated_few_shots": 0
|
890 |
-
},
|
891 |
-
"harness|hendrycksTest-high_school_macroeconomics|5": {
|
892 |
-
"hashes": {
|
893 |
-
"hash_examples": "59c2915cacfd3fbb",
|
894 |
-
"hash_full_prompts": "c6bd9d25158abd0e",
|
895 |
-
"hash_input_tokens": "840fc6403eb69ab0",
|
896 |
-
"hash_cont_tokens": "d5ad4c5bdca967ad"
|
897 |
-
},
|
898 |
-
"truncated": 0,
|
899 |
-
"non_truncated": 390,
|
900 |
-
"padded": 1560,
|
901 |
-
"non_padded": 0,
|
902 |
-
"effective_few_shots": 5.0,
|
903 |
-
"num_truncated_few_shots": 0
|
904 |
-
},
|
905 |
-
"harness|hendrycksTest-high_school_mathematics|5": {
|
906 |
-
"hashes": {
|
907 |
-
"hash_examples": "1f8ac897608de342",
|
908 |
-
"hash_full_prompts": "5d88f41fc2d643a8",
|
909 |
-
"hash_input_tokens": "3629a7f2cd17faeb",
|
910 |
-
"hash_cont_tokens": "8f631ca5687dd0d4"
|
911 |
-
},
|
912 |
-
"truncated": 0,
|
913 |
-
"non_truncated": 270,
|
914 |
-
"padded": 1080,
|
915 |
-
"non_padded": 0,
|
916 |
-
"effective_few_shots": 5.0,
|
917 |
-
"num_truncated_few_shots": 0
|
918 |
-
},
|
919 |
-
"harness|hendrycksTest-high_school_microeconomics|5": {
|
920 |
-
"hashes": {
|
921 |
-
"hash_examples": "ead6a0f2f6c83370",
|
922 |
-
"hash_full_prompts": "bfc393381298609e",
|
923 |
-
"hash_input_tokens": "6846f684260e3997",
|
924 |
-
"hash_cont_tokens": "7321048a28451473"
|
925 |
-
},
|
926 |
-
"truncated": 0,
|
927 |
-
"non_truncated": 238,
|
928 |
-
"padded": 952,
|
929 |
-
"non_padded": 0,
|
930 |
-
"effective_few_shots": 5.0,
|
931 |
-
"num_truncated_few_shots": 0
|
932 |
-
},
|
933 |
-
"harness|hendrycksTest-high_school_physics|5": {
|
934 |
-
"hashes": {
|
935 |
-
"hash_examples": "c3f2025990afec64",
|
936 |
-
"hash_full_prompts": "fc78b4997e436734",
|
937 |
-
"hash_input_tokens": "85aee25d6bdad94a",
|
938 |
-
"hash_cont_tokens": "bb137581f269861c"
|
939 |
-
},
|
940 |
-
"truncated": 0,
|
941 |
-
"non_truncated": 151,
|
942 |
-
"padded": 604,
|
943 |
-
"non_padded": 0,
|
944 |
-
"effective_few_shots": 5.0,
|
945 |
-
"num_truncated_few_shots": 0
|
946 |
-
},
|
947 |
-
"harness|hendrycksTest-high_school_psychology|5": {
|
948 |
-
"hashes": {
|
949 |
-
"hash_examples": "21f8aab618f6d636",
|
950 |
-
"hash_full_prompts": "d5c76aa40b9dbc43",
|
951 |
-
"hash_input_tokens": "290b66d6d666a35f",
|
952 |
-
"hash_cont_tokens": "b455cab2675bd863"
|
953 |
-
},
|
954 |
-
"truncated": 0,
|
955 |
-
"non_truncated": 545,
|
956 |
-
"padded": 2180,
|
957 |
-
"non_padded": 0,
|
958 |
-
"effective_few_shots": 5.0,
|
959 |
-
"num_truncated_few_shots": 0
|
960 |
-
},
|
961 |
-
"harness|hendrycksTest-high_school_statistics|5": {
|
962 |
-
"hashes": {
|
963 |
-
"hash_examples": "2386a60a11fc5de3",
|
964 |
-
"hash_full_prompts": "4c5c8be5aafac432",
|
965 |
-
"hash_input_tokens": "a77a7668b437bc82",
|
966 |
-
"hash_cont_tokens": "1b3196fec7e58037"
|
967 |
-
},
|
968 |
-
"truncated": 0,
|
969 |
-
"non_truncated": 216,
|
970 |
-
"padded": 864,
|
971 |
-
"non_padded": 0,
|
972 |
-
"effective_few_shots": 5.0,
|
973 |
-
"num_truncated_few_shots": 0
|
974 |
-
},
|
975 |
-
"harness|hendrycksTest-high_school_us_history|5": {
|
976 |
-
"hashes": {
|
977 |
-
"hash_examples": "74961543be40f04f",
|
978 |
-
"hash_full_prompts": "5d5ca4840131ba21",
|
979 |
-
"hash_input_tokens": "63548c7fa9ba7a78",
|
980 |
-
"hash_cont_tokens": "a331dedc2aa01b3e"
|
981 |
-
},
|
982 |
-
"truncated": 0,
|
983 |
-
"non_truncated": 204,
|
984 |
-
"padded": 816,
|
985 |
-
"non_padded": 0,
|
986 |
-
"effective_few_shots": 5.0,
|
987 |
-
"num_truncated_few_shots": 0
|
988 |
-
},
|
989 |
-
"harness|hendrycksTest-high_school_world_history|5": {
|
990 |
-
"hashes": {
|
991 |
-
"hash_examples": "2ad2f6b7198b2234",
|
992 |
-
"hash_full_prompts": "11845057459afd72",
|
993 |
-
"hash_input_tokens": "83c5da18bfa50812",
|
994 |
-
"hash_cont_tokens": "d0fbe030b8c8c2bf"
|
995 |
-
},
|
996 |
-
"truncated": 0,
|
997 |
-
"non_truncated": 237,
|
998 |
-
"padded": 948,
|
999 |
-
"non_padded": 0,
|
1000 |
-
"effective_few_shots": 5.0,
|
1001 |
-
"num_truncated_few_shots": 0
|
1002 |
-
},
|
1003 |
-
"harness|hendrycksTest-human_aging|5": {
|
1004 |
-
"hashes": {
|
1005 |
-
"hash_examples": "1a7199dc733e779b",
|
1006 |
-
"hash_full_prompts": "756b9096b8eaf892",
|
1007 |
-
"hash_input_tokens": "bebbd11f22006685",
|
1008 |
-
"hash_cont_tokens": "1dd29c3755494850"
|
1009 |
-
},
|
1010 |
-
"truncated": 0,
|
1011 |
-
"non_truncated": 223,
|
1012 |
-
"padded": 892,
|
1013 |
-
"non_padded": 0,
|
1014 |
-
"effective_few_shots": 5.0,
|
1015 |
-
"num_truncated_few_shots": 0
|
1016 |
-
},
|
1017 |
-
"harness|hendrycksTest-human_sexuality|5": {
|
1018 |
-
"hashes": {
|
1019 |
-
"hash_examples": "7acb8fdad97f88a6",
|
1020 |
-
"hash_full_prompts": "731a52ff15b8cfdb",
|
1021 |
-
"hash_input_tokens": "7b85ee9b8ee54f4f",
|
1022 |
-
"hash_cont_tokens": "c85573f663c10691"
|
1023 |
-
},
|
1024 |
-
"truncated": 0,
|
1025 |
-
"non_truncated": 131,
|
1026 |
-
"padded": 524,
|
1027 |
-
"non_padded": 0,
|
1028 |
-
"effective_few_shots": 5.0,
|
1029 |
-
"num_truncated_few_shots": 0
|
1030 |
-
},
|
1031 |
-
"harness|hendrycksTest-international_law|5": {
|
1032 |
-
"hashes": {
|
1033 |
-
"hash_examples": "1300bfd0dfc59114",
|
1034 |
-
"hash_full_prompts": "db2aefbff5eec996",
|
1035 |
-
"hash_input_tokens": "7bfc55ab7065943e",
|
1036 |
-
"hash_cont_tokens": "d263804ba918154f"
|
1037 |
-
},
|
1038 |
-
"truncated": 0,
|
1039 |
-
"non_truncated": 121,
|
1040 |
-
"padded": 484,
|
1041 |
-
"non_padded": 0,
|
1042 |
-
"effective_few_shots": 5.0,
|
1043 |
-
"num_truncated_few_shots": 0
|
1044 |
-
},
|
1045 |
-
"harness|hendrycksTest-jurisprudence|5": {
|
1046 |
-
"hashes": {
|
1047 |
-
"hash_examples": "083b1e4904c48dc2",
|
1048 |
-
"hash_full_prompts": "0f89ee3fe03d6a21",
|
1049 |
-
"hash_input_tokens": "69573f1675e053c6",
|
1050 |
-
"hash_cont_tokens": "581986691a84ece8"
|
1051 |
-
},
|
1052 |
-
"truncated": 0,
|
1053 |
-
"non_truncated": 108,
|
1054 |
-
"padded": 432,
|
1055 |
-
"non_padded": 0,
|
1056 |
-
"effective_few_shots": 5.0,
|
1057 |
-
"num_truncated_few_shots": 0
|
1058 |
-
},
|
1059 |
-
"harness|hendrycksTest-logical_fallacies|5": {
|
1060 |
-
"hashes": {
|
1061 |
-
"hash_examples": "709128f9926a634c",
|
1062 |
-
"hash_full_prompts": "98a04b1f8f841069",
|
1063 |
-
"hash_input_tokens": "552324ef20094bdc",
|
1064 |
-
"hash_cont_tokens": "55a858b28bbda458"
|
1065 |
-
},
|
1066 |
-
"truncated": 0,
|
1067 |
-
"non_truncated": 163,
|
1068 |
-
"padded": 652,
|
1069 |
-
"non_padded": 0,
|
1070 |
-
"effective_few_shots": 5.0,
|
1071 |
-
"num_truncated_few_shots": 0
|
1072 |
-
},
|
1073 |
-
"harness|hendrycksTest-machine_learning|5": {
|
1074 |
-
"hashes": {
|
1075 |
-
"hash_examples": "88f22a636029ae47",
|
1076 |
-
"hash_full_prompts": "2e1c8d4b1e0cc921",
|
1077 |
-
"hash_input_tokens": "96449357a7318905",
|
1078 |
-
"hash_cont_tokens": "e99d3d3efd4ac7a3"
|
1079 |
-
},
|
1080 |
-
"truncated": 0,
|
1081 |
-
"non_truncated": 112,
|
1082 |
-
"padded": 448,
|
1083 |
-
"non_padded": 0,
|
1084 |
-
"effective_few_shots": 5.0,
|
1085 |
-
"num_truncated_few_shots": 0
|
1086 |
-
},
|
1087 |
-
"harness|hendrycksTest-management|5": {
|
1088 |
-
"hashes": {
|
1089 |
-
"hash_examples": "8c8a1e07a2151dca",
|
1090 |
-
"hash_full_prompts": "f51611f514b265b0",
|
1091 |
-
"hash_input_tokens": "3b849249168e3b88",
|
1092 |
-
"hash_cont_tokens": "13d9dc56bca34726"
|
1093 |
-
},
|
1094 |
-
"truncated": 0,
|
1095 |
-
"non_truncated": 103,
|
1096 |
-
"padded": 412,
|
1097 |
-
"non_padded": 0,
|
1098 |
-
"effective_few_shots": 5.0,
|
1099 |
-
"num_truncated_few_shots": 0
|
1100 |
-
},
|
1101 |
-
"harness|hendrycksTest-marketing|5": {
|
1102 |
-
"hashes": {
|
1103 |
-
"hash_examples": "2668953431f91e96",
|
1104 |
-
"hash_full_prompts": "77562bef997c7650",
|
1105 |
-
"hash_input_tokens": "af0e186f2756b70d",
|
1106 |
-
"hash_cont_tokens": "2700ea26933916a2"
|
1107 |
-
},
|
1108 |
-
"truncated": 0,
|
1109 |
-
"non_truncated": 234,
|
1110 |
-
"padded": 936,
|
1111 |
-
"non_padded": 0,
|
1112 |
-
"effective_few_shots": 5.0,
|
1113 |
-
"num_truncated_few_shots": 0
|
1114 |
-
},
|
1115 |
-
"harness|hendrycksTest-medical_genetics|5": {
|
1116 |
-
"hashes": {
|
1117 |
-
"hash_examples": "9c2dda34a2ea4fd2",
|
1118 |
-
"hash_full_prompts": "202139046daa118f",
|
1119 |
-
"hash_input_tokens": "9f6a6de16509b6d9",
|
1120 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1121 |
-
},
|
1122 |
-
"truncated": 0,
|
1123 |
-
"non_truncated": 100,
|
1124 |
-
"padded": 400,
|
1125 |
-
"non_padded": 0,
|
1126 |
-
"effective_few_shots": 5.0,
|
1127 |
-
"num_truncated_few_shots": 0
|
1128 |
-
},
|
1129 |
-
"harness|hendrycksTest-miscellaneous|5": {
|
1130 |
-
"hashes": {
|
1131 |
-
"hash_examples": "41adb694024809c2",
|
1132 |
-
"hash_full_prompts": "bffec9fc237bcf93",
|
1133 |
-
"hash_input_tokens": "9194406d589f7c10",
|
1134 |
-
"hash_cont_tokens": "7bf4341c79587250"
|
1135 |
-
},
|
1136 |
-
"truncated": 0,
|
1137 |
-
"non_truncated": 783,
|
1138 |
-
"padded": 3132,
|
1139 |
-
"non_padded": 0,
|
1140 |
-
"effective_few_shots": 5.0,
|
1141 |
-
"num_truncated_few_shots": 0
|
1142 |
-
},
|
1143 |
-
"harness|hendrycksTest-moral_disputes|5": {
|
1144 |
-
"hashes": {
|
1145 |
-
"hash_examples": "3171c13ba3c594c4",
|
1146 |
-
"hash_full_prompts": "170831fc36f1d59e",
|
1147 |
-
"hash_input_tokens": "769486efc74d9f8e",
|
1148 |
-
"hash_cont_tokens": "38a48e9de6976f00"
|
1149 |
-
},
|
1150 |
-
"truncated": 0,
|
1151 |
-
"non_truncated": 346,
|
1152 |
-
"padded": 1384,
|
1153 |
-
"non_padded": 0,
|
1154 |
-
"effective_few_shots": 5.0,
|
1155 |
-
"num_truncated_few_shots": 0
|
1156 |
-
},
|
1157 |
-
"harness|hendrycksTest-moral_scenarios|5": {
|
1158 |
-
"hashes": {
|
1159 |
-
"hash_examples": "9873e077e83e0546",
|
1160 |
-
"hash_full_prompts": "08f4ceba3131a068",
|
1161 |
-
"hash_input_tokens": "a90fd4dd90959dad",
|
1162 |
-
"hash_cont_tokens": "761c4dc187689d89"
|
1163 |
-
},
|
1164 |
-
"truncated": 0,
|
1165 |
-
"non_truncated": 895,
|
1166 |
-
"padded": 3580,
|
1167 |
-
"non_padded": 0,
|
1168 |
-
"effective_few_shots": 5.0,
|
1169 |
-
"num_truncated_few_shots": 0
|
1170 |
-
},
|
1171 |
-
"harness|hendrycksTest-nutrition|5": {
|
1172 |
-
"hashes": {
|
1173 |
-
"hash_examples": "7db1d8142ec14323",
|
1174 |
-
"hash_full_prompts": "4c0e68e3586cb453",
|
1175 |
-
"hash_input_tokens": "1a3b843e66efd29b",
|
1176 |
-
"hash_cont_tokens": "65005bd7d6f6012a"
|
1177 |
-
},
|
1178 |
-
"truncated": 0,
|
1179 |
-
"non_truncated": 306,
|
1180 |
-
"padded": 1224,
|
1181 |
-
"non_padded": 0,
|
1182 |
-
"effective_few_shots": 5.0,
|
1183 |
-
"num_truncated_few_shots": 0
|
1184 |
-
},
|
1185 |
-
"harness|hendrycksTest-philosophy|5": {
|
1186 |
-
"hashes": {
|
1187 |
-
"hash_examples": "9b455b7d72811cc8",
|
1188 |
-
"hash_full_prompts": "e467f822d8a0d3ff",
|
1189 |
-
"hash_input_tokens": "09820001a3d00013",
|
1190 |
-
"hash_cont_tokens": "0b47934fb6314dec"
|
1191 |
-
},
|
1192 |
-
"truncated": 0,
|
1193 |
-
"non_truncated": 311,
|
1194 |
-
"padded": 1244,
|
1195 |
-
"non_padded": 0,
|
1196 |
-
"effective_few_shots": 5.0,
|
1197 |
-
"num_truncated_few_shots": 0
|
1198 |
-
},
|
1199 |
-
"harness|hendrycksTest-prehistory|5": {
|
1200 |
-
"hashes": {
|
1201 |
-
"hash_examples": "8be90d0f538f1560",
|
1202 |
-
"hash_full_prompts": "152187949bcd0921",
|
1203 |
-
"hash_input_tokens": "7c4ec364ce2768c7",
|
1204 |
-
"hash_cont_tokens": "3f20acd855ee0a29"
|
1205 |
-
},
|
1206 |
-
"truncated": 0,
|
1207 |
-
"non_truncated": 324,
|
1208 |
-
"padded": 1296,
|
1209 |
-
"non_padded": 0,
|
1210 |
-
"effective_few_shots": 5.0,
|
1211 |
-
"num_truncated_few_shots": 0
|
1212 |
-
},
|
1213 |
-
"harness|hendrycksTest-professional_accounting|5": {
|
1214 |
-
"hashes": {
|
1215 |
-
"hash_examples": "8d377597916cd07e",
|
1216 |
-
"hash_full_prompts": "0eb7345d6144ee0d",
|
1217 |
-
"hash_input_tokens": "ced0534574d0ae3f",
|
1218 |
-
"hash_cont_tokens": "8f122ba881355d4b"
|
1219 |
-
},
|
1220 |
-
"truncated": 0,
|
1221 |
-
"non_truncated": 282,
|
1222 |
-
"padded": 1128,
|
1223 |
-
"non_padded": 0,
|
1224 |
-
"effective_few_shots": 5.0,
|
1225 |
-
"num_truncated_few_shots": 0
|
1226 |
-
},
|
1227 |
-
"harness|hendrycksTest-professional_law|5": {
|
1228 |
-
"hashes": {
|
1229 |
-
"hash_examples": "cd9dbc52b3c932d6",
|
1230 |
-
"hash_full_prompts": "36ac764272bfb182",
|
1231 |
-
"hash_input_tokens": "bcbdbbde22ec73e3",
|
1232 |
-
"hash_cont_tokens": "90d5df417c4d3fd3"
|
1233 |
-
},
|
1234 |
-
"truncated": 0,
|
1235 |
-
"non_truncated": 1534,
|
1236 |
-
"padded": 6136,
|
1237 |
-
"non_padded": 0,
|
1238 |
-
"effective_few_shots": 5.0,
|
1239 |
-
"num_truncated_few_shots": 0
|
1240 |
-
},
|
1241 |
-
"harness|hendrycksTest-professional_medicine|5": {
|
1242 |
-
"hashes": {
|
1243 |
-
"hash_examples": "b20e4e816c1e383e",
|
1244 |
-
"hash_full_prompts": "7b8d69ea2acaf2f7",
|
1245 |
-
"hash_input_tokens": "c54d753563114d45",
|
1246 |
-
"hash_cont_tokens": "4a2d2988884f7f70"
|
1247 |
-
},
|
1248 |
-
"truncated": 0,
|
1249 |
-
"non_truncated": 272,
|
1250 |
-
"padded": 1088,
|
1251 |
-
"non_padded": 0,
|
1252 |
-
"effective_few_shots": 5.0,
|
1253 |
-
"num_truncated_few_shots": 0
|
1254 |
-
},
|
1255 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
1256 |
-
"hashes": {
|
1257 |
-
"hash_examples": "d45b73b22f9cc039",
|
1258 |
-
"hash_full_prompts": "fe8937e9ffc99771",
|
1259 |
-
"hash_input_tokens": "b75dc55c0e32fa52",
|
1260 |
-
"hash_cont_tokens": "e0a952cb8a9c81de"
|
1261 |
-
},
|
1262 |
-
"truncated": 0,
|
1263 |
-
"non_truncated": 612,
|
1264 |
-
"padded": 2448,
|
1265 |
-
"non_padded": 0,
|
1266 |
-
"effective_few_shots": 5.0,
|
1267 |
-
"num_truncated_few_shots": 0
|
1268 |
-
},
|
1269 |
-
"harness|hendrycksTest-public_relations|5": {
|
1270 |
-
"hashes": {
|
1271 |
-
"hash_examples": "0d25072e1761652a",
|
1272 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
1273 |
-
"hash_input_tokens": "5ccdc8ec8db99622",
|
1274 |
-
"hash_cont_tokens": "1fa77a8dff3922b8"
|
1275 |
-
},
|
1276 |
-
"truncated": 0,
|
1277 |
-
"non_truncated": 110,
|
1278 |
-
"padded": 440,
|
1279 |
-
"non_padded": 0,
|
1280 |
-
"effective_few_shots": 5.0,
|
1281 |
-
"num_truncated_few_shots": 0
|
1282 |
-
},
|
1283 |
-
"harness|hendrycksTest-security_studies|5": {
|
1284 |
-
"hashes": {
|
1285 |
-
"hash_examples": "62bb8197e63d60d4",
|
1286 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
1287 |
-
"hash_input_tokens": "ca8497342e5b1d57",
|
1288 |
-
"hash_cont_tokens": "81fc9cb3cbdd52db"
|
1289 |
-
},
|
1290 |
-
"truncated": 0,
|
1291 |
-
"non_truncated": 245,
|
1292 |
-
"padded": 980,
|
1293 |
-
"non_padded": 0,
|
1294 |
-
"effective_few_shots": 5.0,
|
1295 |
-
"num_truncated_few_shots": 0
|
1296 |
-
},
|
1297 |
-
"harness|hendrycksTest-sociology|5": {
|
1298 |
-
"hashes": {
|
1299 |
-
"hash_examples": "e7959df87dea8672",
|
1300 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
1301 |
-
"hash_input_tokens": "069c76424fbd3dab",
|
1302 |
-
"hash_cont_tokens": "2a0493252ed2cf43"
|
1303 |
-
},
|
1304 |
-
"truncated": 0,
|
1305 |
-
"non_truncated": 201,
|
1306 |
-
"padded": 804,
|
1307 |
-
"non_padded": 0,
|
1308 |
-
"effective_few_shots": 5.0,
|
1309 |
-
"num_truncated_few_shots": 0
|
1310 |
-
},
|
1311 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
1312 |
-
"hashes": {
|
1313 |
-
"hash_examples": "4a56a01ddca44dca",
|
1314 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
1315 |
-
"hash_input_tokens": "a7e393a626169576",
|
1316 |
-
"hash_cont_tokens": "17b868b63507f9a3"
|
1317 |
-
},
|
1318 |
-
"truncated": 0,
|
1319 |
-
"non_truncated": 100,
|
1320 |
-
"padded": 400,
|
1321 |
-
"non_padded": 0,
|
1322 |
-
"effective_few_shots": 5.0,
|
1323 |
-
"num_truncated_few_shots": 0
|
1324 |
-
},
|
1325 |
-
"harness|hendrycksTest-virology|5": {
|
1326 |
-
"hashes": {
|
1327 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
1328 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
1329 |
-
"hash_input_tokens": "bf99dc973e3a650d",
|
1330 |
-
"hash_cont_tokens": "5ab892d003b00c98"
|
1331 |
-
},
|
1332 |
-
"truncated": 0,
|
1333 |
-
"non_truncated": 166,
|
1334 |
-
"padded": 664,
|
1335 |
-
"non_padded": 0,
|
1336 |
-
"effective_few_shots": 5.0,
|
1337 |
-
"num_truncated_few_shots": 0
|
1338 |
-
},
|
1339 |
-
"harness|hendrycksTest-world_religions|5": {
|
1340 |
-
"hashes": {
|
1341 |
-
"hash_examples": "3b29cfaf1a81c379",
|
1342 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
1343 |
-
"hash_input_tokens": "1761cfaf21797065",
|
1344 |
-
"hash_cont_tokens": "15a5e5dbdfbb8568"
|
1345 |
-
},
|
1346 |
-
"truncated": 0,
|
1347 |
-
"non_truncated": 171,
|
1348 |
-
"padded": 684,
|
1349 |
-
"non_padded": 0,
|
1350 |
-
"effective_few_shots": 5.0,
|
1351 |
-
"num_truncated_few_shots": 0
|
1352 |
-
},
|
1353 |
-
"harness|truthfulqa:mc|0": {
|
1354 |
-
"hashes": {
|
1355 |
-
"hash_examples": "23176c0531c7b867",
|
1356 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
1357 |
-
"hash_input_tokens": "298b43914bbdf4ca",
|
1358 |
-
"hash_cont_tokens": "5a8d4bb398b1c3c0"
|
1359 |
-
},
|
1360 |
-
"truncated": 0,
|
1361 |
-
"non_truncated": 817,
|
1362 |
-
"padded": 9996,
|
1363 |
-
"non_padded": 0,
|
1364 |
-
"effective_few_shots": 0.0,
|
1365 |
-
"num_truncated_few_shots": 0
|
1366 |
-
},
|
1367 |
-
"harness|winogrande|5": {
|
1368 |
-
"hashes": {
|
1369 |
-
"hash_examples": "aada0a176fd81218",
|
1370 |
-
"hash_full_prompts": "c8655cbd12de8409",
|
1371 |
-
"hash_input_tokens": "31aa3477d959f771",
|
1372 |
-
"hash_cont_tokens": "618558fb93c0f288"
|
1373 |
-
},
|
1374 |
-
"truncated": 0,
|
1375 |
-
"non_truncated": 1267,
|
1376 |
-
"padded": 2534,
|
1377 |
-
"non_padded": 0,
|
1378 |
-
"effective_few_shots": 5.0,
|
1379 |
-
"num_truncated_few_shots": 0
|
1380 |
-
},
|
1381 |
-
"harness|gsm8k|5": {
|
1382 |
-
"hashes": {
|
1383 |
-
"hash_examples": "4c0843a5d99bcfdc",
|
1384 |
-
"hash_full_prompts": "41d55e83abc0e02d",
|
1385 |
-
"hash_input_tokens": "6af0ae8cfe684f50",
|
1386 |
-
"hash_cont_tokens": "01374f708aeddf82"
|
1387 |
-
},
|
1388 |
-
"truncated": 0,
|
1389 |
-
"non_truncated": 1319,
|
1390 |
-
"padded": 0,
|
1391 |
-
"non_padded": 1319,
|
1392 |
-
"effective_few_shots": 5.0,
|
1393 |
-
"num_truncated_few_shots": 0
|
1394 |
-
}
|
1395 |
-
},
|
1396 |
-
"summary_general": {
|
1397 |
-
"hashes": {
|
1398 |
-
"hash_examples": "3b7fa57a057f9415",
|
1399 |
-
"hash_full_prompts": "63615fc50fc9417c",
|
1400 |
-
"hash_input_tokens": "9c04e828ae29cacc",
|
1401 |
-
"hash_cont_tokens": "4b585fc841a0e774"
|
1402 |
-
},
|
1403 |
-
"truncated": 0,
|
1404 |
-
"non_truncated": 28659,
|
1405 |
-
"padded": 113460,
|
1406 |
-
"non_padded": 1412,
|
1407 |
-
"num_truncated_few_shots": 0
|
1408 |
-
}
|
1409 |
-
}
|
upstage/llama-30b-instruct-2048/results_2023-07-19T12-29-43.161348.json
DELETED
@@ -1,871 +0,0 @@
|
|
{
    "results": {
        "harness|arc:challenge|25": {
            "acc": 0.6203071672354948,
            "acc_stderr": 0.014182119866974872,
            "acc_norm": 0.6493174061433447,
            "acc_norm_stderr": 0.013944635930726094
        },
        "harness|hellaswag|10": {
            "acc": 0.6509659430392352,
            "acc_stderr": 0.004756905819649974,
            "acc_norm": 0.8494323839872535,
            "acc_norm_stderr": 0.003568960247101693
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|hendrycksTest-anatomy|5": {
            "acc": 0.5481481481481482,
            "acc_stderr": 0.04299268905480864,
            "acc_norm": 0.5481481481481482,
            "acc_norm_stderr": 0.04299268905480864
        },
        "harness|hendrycksTest-astronomy|5": {
            "acc": 0.6907894736842105,
            "acc_stderr": 0.037610708698674805,
            "acc_norm": 0.6907894736842105,
            "acc_norm_stderr": 0.037610708698674805
        },
        "harness|hendrycksTest-business_ethics|5": {
            "acc": 0.62,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "acc": 0.6566037735849056,
            "acc_stderr": 0.02922452646912479,
            "acc_norm": 0.6566037735849056,
            "acc_norm_stderr": 0.02922452646912479
        },
        "harness|hendrycksTest-college_biology|5": {
            "acc": 0.6597222222222222,
            "acc_stderr": 0.039621355734862175,
            "acc_norm": 0.6597222222222222,
            "acc_norm_stderr": 0.039621355734862175
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|hendrycksTest-college_medicine|5": {
            "acc": 0.5549132947976878,
            "acc_stderr": 0.03789401760283647,
            "acc_norm": 0.5549132947976878,
            "acc_norm_stderr": 0.03789401760283647
        },
        "harness|hendrycksTest-college_physics|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.049406356306056595,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.049406356306056595
        },
        "harness|hendrycksTest-computer_security|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "acc": 0.5234042553191489,
            "acc_stderr": 0.03265019475033581,
            "acc_norm": 0.5234042553191489,
            "acc_norm_stderr": 0.03265019475033581
        },
        "harness|hendrycksTest-econometrics|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.046446020912223177,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.046446020912223177
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "acc": 0.5310344827586206,
            "acc_stderr": 0.04158632762097828,
            "acc_norm": 0.5310344827586206,
            "acc_norm_stderr": 0.04158632762097828
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "acc": 0.4021164021164021,
            "acc_stderr": 0.025253032554997695,
            "acc_norm": 0.4021164021164021,
            "acc_norm_stderr": 0.025253032554997695
        },
        "harness|hendrycksTest-formal_logic|5": {
            "acc": 0.38095238095238093,
            "acc_stderr": 0.043435254289490965,
            "acc_norm": 0.38095238095238093,
            "acc_norm_stderr": 0.043435254289490965
        },
        "harness|hendrycksTest-global_facts|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "acc": 0.7580645161290323,
            "acc_stderr": 0.024362599693031096,
            "acc_norm": 0.7580645161290323,
            "acc_norm_stderr": 0.024362599693031096
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "acc": 0.46798029556650245,
            "acc_stderr": 0.03510766597959217,
            "acc_norm": 0.46798029556650245,
            "acc_norm_stderr": 0.03510766597959217
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "acc": 0.7393939393939394,
            "acc_stderr": 0.034277431758165236,
            "acc_norm": 0.7393939393939394,
            "acc_norm_stderr": 0.034277431758165236
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "acc": 0.8080808080808081,
            "acc_stderr": 0.02805779167298902,
            "acc_norm": 0.8080808080808081,
            "acc_norm_stderr": 0.02805779167298902
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "acc": 0.8652849740932642,
            "acc_stderr": 0.02463978909770944,
            "acc_norm": 0.8652849740932642,
            "acc_norm_stderr": 0.02463978909770944
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "acc": 0.6051282051282051,
            "acc_stderr": 0.024784316942156392,
            "acc_norm": 0.6051282051282051,
            "acc_norm_stderr": 0.024784316942156392
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "acc": 0.32222222222222224,
            "acc_stderr": 0.028493465091028597,
            "acc_norm": 0.32222222222222224,
            "acc_norm_stderr": 0.028493465091028597
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "acc": 0.6722689075630253,
            "acc_stderr": 0.030489911417673227,
            "acc_norm": 0.6722689075630253,
            "acc_norm_stderr": 0.030489911417673227
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "acc": 0.3443708609271523,
            "acc_stderr": 0.038796870240733264,
            "acc_norm": 0.3443708609271523,
            "acc_norm_stderr": 0.038796870240733264
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "acc": 0.8091743119266055,
            "acc_stderr": 0.016847676400091098,
            "acc_norm": 0.8091743119266055,
            "acc_norm_stderr": 0.016847676400091098
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "acc": 0.5,
            "acc_stderr": 0.034099716973523674,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.034099716973523674
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "acc": 0.8186274509803921,
            "acc_stderr": 0.027044621719474082,
            "acc_norm": 0.8186274509803921,
            "acc_norm_stderr": 0.027044621719474082
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "acc": 0.8227848101265823,
            "acc_stderr": 0.024856364184503224,
            "acc_norm": 0.8227848101265823,
            "acc_norm_stderr": 0.024856364184503224
        },
        "harness|hendrycksTest-human_aging|5": {
            "acc": 0.695067264573991,
            "acc_stderr": 0.030898610882477515,
            "acc_norm": 0.695067264573991,
            "acc_norm_stderr": 0.030898610882477515
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "acc": 0.732824427480916,
            "acc_stderr": 0.03880848301082397,
            "acc_norm": 0.732824427480916,
            "acc_norm_stderr": 0.03880848301082397
        },
        "harness|hendrycksTest-international_law|5": {
            "acc": 0.8016528925619835,
            "acc_stderr": 0.036401182719909476,
            "acc_norm": 0.8016528925619835,
            "acc_norm_stderr": 0.036401182719909476
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "acc": 0.7870370370370371,
            "acc_stderr": 0.0395783547198098,
            "acc_norm": 0.7870370370370371,
            "acc_norm_stderr": 0.0395783547198098
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "acc": 0.6932515337423313,
            "acc_stderr": 0.03623089915724147,
            "acc_norm": 0.6932515337423313,
            "acc_norm_stderr": 0.03623089915724147
        },
        "harness|hendrycksTest-machine_learning|5": {
            "acc": 0.49107142857142855,
            "acc_stderr": 0.04745033255489123,
            "acc_norm": 0.49107142857142855,
            "acc_norm_stderr": 0.04745033255489123
        },
        "harness|hendrycksTest-management|5": {
            "acc": 0.8058252427184466,
            "acc_stderr": 0.03916667762822584,
            "acc_norm": 0.8058252427184466,
            "acc_norm_stderr": 0.03916667762822584
        },
        "harness|hendrycksTest-marketing|5": {
            "acc": 0.8461538461538461,
            "acc_stderr": 0.023636873317489288,
            "acc_norm": 0.8461538461538461,
            "acc_norm_stderr": 0.023636873317489288
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "acc": 0.7969348659003831,
            "acc_stderr": 0.014385525076611567,
            "acc_norm": 0.7969348659003831,
            "acc_norm_stderr": 0.014385525076611567
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "acc": 0.6965317919075145,
            "acc_stderr": 0.024752411960917202,
            "acc_norm": 0.6965317919075145,
            "acc_norm_stderr": 0.024752411960917202
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "acc": 0.47262569832402235,
            "acc_stderr": 0.016697420650642752,
            "acc_norm": 0.47262569832402235,
            "acc_norm_stderr": 0.016697420650642752
        },
        "harness|hendrycksTest-nutrition|5": {
            "acc": 0.6405228758169934,
            "acc_stderr": 0.027475969910660952,
            "acc_norm": 0.6405228758169934,
            "acc_norm_stderr": 0.027475969910660952
        },
        "harness|hendrycksTest-philosophy|5": {
            "acc": 0.662379421221865,
            "acc_stderr": 0.026858825879488533,
            "acc_norm": 0.662379421221865,
            "acc_norm_stderr": 0.026858825879488533
        },
        "harness|hendrycksTest-prehistory|5": {
            "acc": 0.7314814814814815,
            "acc_stderr": 0.024659685185967294,
            "acc_norm": 0.7314814814814815,
            "acc_norm_stderr": 0.024659685185967294
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "acc": 0.46099290780141844,
            "acc_stderr": 0.029736592526424438,
            "acc_norm": 0.46099290780141844,
            "acc_norm_stderr": 0.029736592526424438
        },
        "harness|hendrycksTest-professional_law|5": {
            "acc": 0.46870925684485004,
            "acc_stderr": 0.012745204626083138,
            "acc_norm": 0.46870925684485004,
            "acc_norm_stderr": 0.012745204626083138
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "acc": 0.6029411764705882,
            "acc_stderr": 0.02972215209928006,
            "acc_norm": 0.6029411764705882,
            "acc_norm_stderr": 0.02972215209928006
        },
        "harness|hendrycksTest-professional_psychology|5": {
            "acc": 0.6568627450980392,
            "acc_stderr": 0.019206606848825365,
            "acc_norm": 0.6568627450980392,
            "acc_norm_stderr": 0.019206606848825365
        },
        "harness|hendrycksTest-public_relations|5": {
            "acc": 0.6636363636363637,
            "acc_stderr": 0.04525393596302506,
            "acc_norm": 0.6636363636363637,
            "acc_norm_stderr": 0.04525393596302506
        },
        "harness|hendrycksTest-security_studies|5": {
            "acc": 0.746938775510204,
            "acc_stderr": 0.027833023871399683,
            "acc_norm": 0.746938775510204,
            "acc_norm_stderr": 0.027833023871399683
        },
        "harness|hendrycksTest-sociology|5": {
            "acc": 0.8059701492537313,
            "acc_stderr": 0.02796267760476891,
            "acc_norm": 0.8059701492537313,
            "acc_norm_stderr": 0.02796267760476891
        },
        "harness|hendrycksTest-us_foreign_policy|5": {
            "acc": 0.83,
            "acc_stderr": 0.0377525168068637,
            "acc_norm": 0.83,
            "acc_norm_stderr": 0.0377525168068637
        },
        "harness|hendrycksTest-virology|5": {
            "acc": 0.5,
            "acc_stderr": 0.03892494720807614,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03892494720807614
        },
        "harness|hendrycksTest-world_religions|5": {
            "acc": 0.7894736842105263,
            "acc_stderr": 0.03126781714663179,
            "acc_norm": 0.7894736842105263,
            "acc_norm_stderr": 0.03126781714663179
        },
        "harness|truthfulqa:mc|0": {
            "mc1": 0.397796817625459,
            "mc1_stderr": 0.017133934248559628,
            "mc2": 0.5629965879248433,
            "mc2_stderr": 0.015490680918668455
        },
        "all": {
            "acc": 0.6195500477030734,
            "acc_stderr": 0.033573377369222265,
            "acc_norm": 0.623405584649783,
            "acc_norm_stderr": 0.03354921754703927,
            "mc1": 0.397796817625459,
            "mc1_stderr": 0.017133934248559628,
            "mc2": 0.5629965879248433,
            "mc2_stderr": 0.015490680918668455
        }
    },
    "versions": {
        "harness|arc:challenge|25": 0,
        "harness|hellaswag|10": 0,
        "harness|hendrycksTest-abstract_algebra|5": 1,
        "harness|hendrycksTest-anatomy|5": 1,
        "harness|hendrycksTest-astronomy|5": 1,
        "harness|hendrycksTest-business_ethics|5": 1,
        "harness|hendrycksTest-clinical_knowledge|5": 1,
        "harness|hendrycksTest-college_biology|5": 1,
        "harness|hendrycksTest-college_chemistry|5": 1,
        "harness|hendrycksTest-college_computer_science|5": 1,
        "harness|hendrycksTest-college_mathematics|5": 1,
        "harness|hendrycksTest-college_medicine|5": 1,
        "harness|hendrycksTest-college_physics|5": 1,
        "harness|hendrycksTest-computer_security|5": 1,
        "harness|hendrycksTest-conceptual_physics|5": 1,
        "harness|hendrycksTest-econometrics|5": 1,
        "harness|hendrycksTest-electrical_engineering|5": 1,
        "harness|hendrycksTest-elementary_mathematics|5": 1,
        "harness|hendrycksTest-formal_logic|5": 1,
        "harness|hendrycksTest-global_facts|5": 1,
        "harness|hendrycksTest-high_school_biology|5": 1,
        "harness|hendrycksTest-high_school_chemistry|5": 1,
        "harness|hendrycksTest-high_school_computer_science|5": 1,
        "harness|hendrycksTest-high_school_european_history|5": 1,
        "harness|hendrycksTest-high_school_geography|5": 1,
        "harness|hendrycksTest-high_school_government_and_politics|5": 1,
        "harness|hendrycksTest-high_school_macroeconomics|5": 1,
        "harness|hendrycksTest-high_school_mathematics|5": 1,
        "harness|hendrycksTest-high_school_microeconomics|5": 1,
        "harness|hendrycksTest-high_school_physics|5": 1,
        "harness|hendrycksTest-high_school_psychology|5": 1,
        "harness|hendrycksTest-high_school_statistics|5": 1,
        "harness|hendrycksTest-high_school_us_history|5": 1,
        "harness|hendrycksTest-high_school_world_history|5": 1,
        "harness|hendrycksTest-human_aging|5": 1,
        "harness|hendrycksTest-human_sexuality|5": 1,
        "harness|hendrycksTest-international_law|5": 1,
        "harness|hendrycksTest-jurisprudence|5": 1,
        "harness|hendrycksTest-logical_fallacies|5": 1,
        "harness|hendrycksTest-machine_learning|5": 1,
        "harness|hendrycksTest-management|5": 1,
        "harness|hendrycksTest-marketing|5": 1,
        "harness|hendrycksTest-medical_genetics|5": 1,
        "harness|hendrycksTest-miscellaneous|5": 1,
        "harness|hendrycksTest-moral_disputes|5": 1,
        "harness|hendrycksTest-moral_scenarios|5": 1,
        "harness|hendrycksTest-nutrition|5": 1,
        "harness|hendrycksTest-philosophy|5": 1,
        "harness|hendrycksTest-prehistory|5": 1,
        "harness|hendrycksTest-professional_accounting|5": 1,
        "harness|hendrycksTest-professional_law|5": 1,
        "harness|hendrycksTest-professional_medicine|5": 1,
        "harness|hendrycksTest-professional_psychology|5": 1,
        "harness|hendrycksTest-public_relations|5": 1,
        "harness|hendrycksTest-security_studies|5": 1,
        "harness|hendrycksTest-sociology|5": 1,
        "harness|hendrycksTest-us_foreign_policy|5": 1,
        "harness|hendrycksTest-virology|5": 1,
        "harness|hendrycksTest-world_religions|5": 1,
        "harness|truthfulqa:mc|0": 1,
        "all": 0
    },
    "task_config": {
        "harness|arc:challenge": "LM Harness task",
        "harness|hellaswag": "LM Harness task",
        "harness|hendrycksTest-abstract_algebra": "LM Harness task",
        "harness|hendrycksTest-anatomy": "LM Harness task",
        "harness|hendrycksTest-astronomy": "LM Harness task",
        "harness|hendrycksTest-business_ethics": "LM Harness task",
        "harness|hendrycksTest-clinical_knowledge": "LM Harness task",
        "harness|hendrycksTest-college_biology": "LM Harness task",
        "harness|hendrycksTest-college_chemistry": "LM Harness task",
        "harness|hendrycksTest-college_computer_science": "LM Harness task",
        "harness|hendrycksTest-college_mathematics": "LM Harness task",
        "harness|hendrycksTest-college_medicine": "LM Harness task",
        "harness|hendrycksTest-college_physics": "LM Harness task",
        "harness|hendrycksTest-computer_security": "LM Harness task",
        "harness|hendrycksTest-conceptual_physics": "LM Harness task",
        "harness|hendrycksTest-econometrics": "LM Harness task",
        "harness|hendrycksTest-electrical_engineering": "LM Harness task",
        "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
        "harness|hendrycksTest-formal_logic": "LM Harness task",
        "harness|hendrycksTest-global_facts": "LM Harness task",
        "harness|hendrycksTest-high_school_biology": "LM Harness task",
        "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
        "harness|hendrycksTest-high_school_computer_science": "LM Harness task",
        "harness|hendrycksTest-high_school_european_history": "LM Harness task",
        "harness|hendrycksTest-high_school_geography": "LM Harness task",
        "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
        "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
        "harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_physics": "LM Harness task",
        "harness|hendrycksTest-high_school_psychology": "LM Harness task",
        "harness|hendrycksTest-high_school_statistics": "LM Harness task",
        "harness|hendrycksTest-high_school_us_history": "LM Harness task",
        "harness|hendrycksTest-high_school_world_history": "LM Harness task",
        "harness|hendrycksTest-human_aging": "LM Harness task",
        "harness|hendrycksTest-human_sexuality": "LM Harness task",
        "harness|hendrycksTest-international_law": "LM Harness task",
        "harness|hendrycksTest-jurisprudence": "LM Harness task",
        "harness|hendrycksTest-logical_fallacies": "LM Harness task",
        "harness|hendrycksTest-machine_learning": "LM Harness task",
        "harness|hendrycksTest-management": "LM Harness task",
        "harness|hendrycksTest-marketing": "LM Harness task",
        "harness|hendrycksTest-medical_genetics": "LM Harness task",
        "harness|hendrycksTest-miscellaneous": "LM Harness task",
        "harness|hendrycksTest-moral_disputes": "LM Harness task",
        "harness|hendrycksTest-moral_scenarios": "LM Harness task",
        "harness|hendrycksTest-nutrition": "LM Harness task",
        "harness|hendrycksTest-philosophy": "LM Harness task",
        "harness|hendrycksTest-prehistory": "LM Harness task",
        "harness|hendrycksTest-professional_accounting": "LM Harness task",
        "harness|hendrycksTest-professional_law": "LM Harness task",
        "harness|hendrycksTest-professional_medicine": "LM Harness task",
        "harness|hendrycksTest-professional_psychology": "LM Harness task",
        "harness|hendrycksTest-public_relations": "LM Harness task",
        "harness|hendrycksTest-security_studies": "LM Harness task",
        "harness|hendrycksTest-sociology": "LM Harness task",
        "harness|hendrycksTest-us_foreign_policy": "LM Harness task",
        "harness|hendrycksTest-virology": "LM Harness task",
        "harness|hendrycksTest-world_religions": "LM Harness task",
        "harness|truthfulqa:mc": "LM Harness task"
    },
    "hashes": {
        "harness|arc:challenge|25": {
            "hash_examples": "fb8c51b1872daeda",
            "hash_full_prompts": "045cbb916e5145c6",
            "hash_input_tokens": "61571bf68d6d89aa",
            "hash_cont_tokens": "8210decc6ff6f7df"
        },
        "harness|hellaswag|10": {
            "hash_examples": "e1768ecb99d7ecf0",
            "hash_full_prompts": "0b4c16983130f84f",
            "hash_input_tokens": "29906669b1c7054a",
            "hash_cont_tokens": "b3b9e9017afa63af"
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "hash_examples": "280f9f325b40559a",
            "hash_full_prompts": "2f776a367d23aea2",
            "hash_input_tokens": "c54ff61ad0273dd7",
            "hash_cont_tokens": "50421e30bef398f9"
        },
        "harness|hendrycksTest-anatomy|5": {
            "hash_examples": "2f83a4f1cab4ba18",
            "hash_full_prompts": "516f74bef25df620",
            "hash_input_tokens": "be31a1e22aef5f90",
            "hash_cont_tokens": "f11971a765cb609f"
        },
        "harness|hendrycksTest-astronomy|5": {
            "hash_examples": "7d587b908da4d762",
            "hash_full_prompts": "faf4e80f65de93ca",
            "hash_input_tokens": "277a7b1fad566940",
            "hash_cont_tokens": "bf30e5d3f48250cb"
        },
        "harness|hendrycksTest-business_ethics|5": {
            "hash_examples": "33e51740670de686",
            "hash_full_prompts": "db01c3ef8e1479d4",
            "hash_input_tokens": "ba552605bc116de5",
            "hash_cont_tokens": "bc1dd9b2d995eb61"
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "hash_examples": "f3366dbe7eefffa4",
            "hash_full_prompts": "49654f71d94b65c3",
            "hash_input_tokens": "428c7563d0b98ab9",
            "hash_cont_tokens": "890a119624b3b935"
        },
        "harness|hendrycksTest-college_biology|5": {
            "hash_examples": "ca2b6753a0193e7f",
            "hash_full_prompts": "2b460b75f1fdfefd",
            "hash_input_tokens": "da036601573942e2",
            "hash_cont_tokens": "875cde3af7a0ee14"
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "hash_examples": "22ff85f1d34f42d1",
            "hash_full_prompts": "242c9be6da583e95",
            "hash_input_tokens": "94e0196d6aded13d",
            "hash_cont_tokens": "50421e30bef398f9"
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "hash_examples": "30318289d717a5cf",
            "hash_full_prompts": "ed2bdb4e87c4b371",
            "hash_input_tokens": "6e4d0f4a8d36690b",
            "hash_cont_tokens": "ffc0fe414cdc4a83"
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "hash_examples": "4944d1f0b6b5d911",
            "hash_full_prompts": "770bc4281c973190",
            "hash_input_tokens": "614054d17109a25d",
            "hash_cont_tokens": "50421e30bef398f9"
        },
        "harness|hendrycksTest-college_medicine|5": {
            "hash_examples": "dd69cc33381275af",
            "hash_full_prompts": "ad2a53e5250ab46e",
            "hash_input_tokens": "1d633b3cc0524ba8",
            "hash_cont_tokens": "1f88b00d41957d82"
        },
        "harness|hendrycksTest-college_physics|5": {
            "hash_examples": "875dd26d22655b0d",
            "hash_full_prompts": "833a0d7b55aed500",
            "hash_input_tokens": "5421d9a1af86cbd4",
            "hash_cont_tokens": "f7b8097afc16a47c"
        },
        "harness|hendrycksTest-computer_security|5": {
            "hash_examples": "006451eedc0ededb",
            "hash_full_prompts": "94034c97e85d8f46",
            "hash_input_tokens": "5e6b70ecb333cf18",
            "hash_cont_tokens": "50421e30bef398f9"
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "hash_examples": "8874ece872d2ca4c",
            "hash_full_prompts": "e40d15a34640d6fa",
            "hash_input_tokens": "c2ef11a87264ceed",
            "hash_cont_tokens": "aa0e8bc655f2f641"
        },
        "harness|hendrycksTest-econometrics|5": {
            "hash_examples": "64d3623b0bfaa43f",
            "hash_full_prompts": "612f340fae41338d",
            "hash_input_tokens": "ecaccd912a4c3978",
            "hash_cont_tokens": "bfb7e3c3c88313f1"
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "hash_examples": "e98f51780c674d7e",
            "hash_full_prompts": "10275b312d812ae6",
            "hash_input_tokens": "1590c84291399be8",
            "hash_cont_tokens": "2425a3f084a591ef"
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "hash_examples": "fc48208a5ac1c0ce",
            "hash_full_prompts": "5ec274c6c82aca23",
            "hash_input_tokens": "3269597f715b0da1",
            "hash_cont_tokens": "f52691aef15a407b"
        },
        "harness|hendrycksTest-formal_logic|5": {
            "hash_examples": "5a6525665f63ea72",
            "hash_full_prompts": "07b92638c4a6b500",
            "hash_input_tokens": "a2800d20f3ab8d7c",
            "hash_cont_tokens": "f515d598d9c21263"
        },
        "harness|hendrycksTest-global_facts|5": {
            "hash_examples": "371d70d743b2b89b",
            "hash_full_prompts": "332fdee50a1921b4",
            "hash_input_tokens": "94ed44b3772505ad",
            "hash_cont_tokens": "50421e30bef398f9"
        },
        "harness|hendrycksTest-high_school_biology|5": {
|
621 |
-
"hash_examples": "a79e1018b1674052",
|
622 |
-
"hash_full_prompts": "e624e26ede922561",
|
623 |
-
"hash_input_tokens": "24423acb928db768",
|
624 |
-
"hash_cont_tokens": "bd85a4156a3613ee"
|
625 |
-
},
|
626 |
-
"harness|hendrycksTest-high_school_chemistry|5": {
|
627 |
-
"hash_examples": "44bfc25c389f0e03",
|
628 |
-
"hash_full_prompts": "0e3e5f5d9246482a",
|
629 |
-
"hash_input_tokens": "831ff35c474e5cef",
|
630 |
-
"hash_cont_tokens": "a95c97af1c14e068"
|
631 |
-
},
|
632 |
-
"harness|hendrycksTest-high_school_computer_science|5": {
|
633 |
-
"hash_examples": "8b8cdb1084f24169",
|
634 |
-
"hash_full_prompts": "c00487e67c1813cc",
|
635 |
-
"hash_input_tokens": "8c34e0f2bda77358",
|
636 |
-
"hash_cont_tokens": "8abfedef914e33c9"
|
637 |
-
},
|
638 |
-
"harness|hendrycksTest-high_school_european_history|5": {
|
639 |
-
"hash_examples": "11cd32d0ef440171",
|
640 |
-
"hash_full_prompts": "318f4513c537c6bf",
|
641 |
-
"hash_input_tokens": "f1f73dd687da18d7",
|
642 |
-
"hash_cont_tokens": "674fc454bdc5ac93"
|
643 |
-
},
|
644 |
-
"harness|hendrycksTest-high_school_geography|5": {
|
645 |
-
"hash_examples": "b60019b9e80b642f",
|
646 |
-
"hash_full_prompts": "ee5789fcc1a81b1e",
|
647 |
-
"hash_input_tokens": "7c5547c7da5bc793",
|
648 |
-
"hash_cont_tokens": "03a5012b916274ea"
|
649 |
-
},
|
650 |
-
"harness|hendrycksTest-high_school_government_and_politics|5": {
|
651 |
-
"hash_examples": "d221ec983d143dc3",
|
652 |
-
"hash_full_prompts": "ac42d888e1ce1155",
|
653 |
-
"hash_input_tokens": "f62991cb6a496b05",
|
654 |
-
"hash_cont_tokens": "a83effb8f76b7d7c"
|
655 |
-
},
|
656 |
-
"harness|hendrycksTest-high_school_macroeconomics|5": {
|
657 |
-
"hash_examples": "59c2915cacfd3fbb",
|
658 |
-
"hash_full_prompts": "c6bd9d25158abd0e",
|
659 |
-
"hash_input_tokens": "4cef2aff6e3d59ed",
|
660 |
-
"hash_cont_tokens": "c583432ad27fcfe0"
|
661 |
-
},
|
662 |
-
"harness|hendrycksTest-high_school_mathematics|5": {
|
663 |
-
"hash_examples": "1f8ac897608de342",
|
664 |
-
"hash_full_prompts": "5d88f41fc2d643a8",
|
665 |
-
"hash_input_tokens": "6e2577ea4082ed2b",
|
666 |
-
"hash_cont_tokens": "24f5dc613660300b"
|
667 |
-
},
|
668 |
-
"harness|hendrycksTest-high_school_microeconomics|5": {
|
669 |
-
"hash_examples": "ead6a0f2f6c83370",
|
670 |
-
"hash_full_prompts": "bfc393381298609e",
|
671 |
-
"hash_input_tokens": "c5fc9aeb1079c8e4",
|
672 |
-
"hash_cont_tokens": "f47f041de50333b9"
|
673 |
-
},
|
674 |
-
"harness|hendrycksTest-high_school_physics|5": {
|
675 |
-
"hash_examples": "c3f2025990afec64",
|
676 |
-
"hash_full_prompts": "fc78b4997e436734",
|
677 |
-
"hash_input_tokens": "555fc385cffa84ca",
|
678 |
-
"hash_cont_tokens": "ba2efcd283e938cc"
|
679 |
-
},
|
680 |
-
"harness|hendrycksTest-high_school_psychology|5": {
|
681 |
-
"hash_examples": "21f8aab618f6d636",
|
682 |
-
"hash_full_prompts": "d5c76aa40b9dbc43",
|
683 |
-
"hash_input_tokens": "febd23cbf9973b7f",
|
684 |
-
"hash_cont_tokens": "942069cd363844d9"
|
685 |
-
},
|
686 |
-
"harness|hendrycksTest-high_school_statistics|5": {
|
687 |
-
"hash_examples": "2386a60a11fc5de3",
|
688 |
-
"hash_full_prompts": "4c5c8be5aafac432",
|
689 |
-
"hash_input_tokens": "424b02981230ee83",
|
690 |
-
"hash_cont_tokens": "955ed42b6f7fa019"
|
691 |
-
},
|
692 |
-
"harness|hendrycksTest-high_school_us_history|5": {
|
693 |
-
"hash_examples": "74961543be40f04f",
|
694 |
-
"hash_full_prompts": "5d5ca4840131ba21",
|
695 |
-
"hash_input_tokens": "50c9ff438c85a69e",
|
696 |
-
"hash_cont_tokens": "cdd0b3dc06d933e5"
|
697 |
-
},
|
698 |
-
"harness|hendrycksTest-high_school_world_history|5": {
|
699 |
-
"hash_examples": "2ad2f6b7198b2234",
|
700 |
-
"hash_full_prompts": "11845057459afd72",
|
701 |
-
"hash_input_tokens": "054824cc474caef5",
|
702 |
-
"hash_cont_tokens": "9a864184946033ac"
|
703 |
-
},
|
704 |
-
"harness|hendrycksTest-human_aging|5": {
|
705 |
-
"hash_examples": "1a7199dc733e779b",
|
706 |
-
"hash_full_prompts": "756b9096b8eaf892",
|
707 |
-
"hash_input_tokens": "541a75f071dcf579",
|
708 |
-
"hash_cont_tokens": "142a4a8a1138a214"
|
709 |
-
},
|
710 |
-
"harness|hendrycksTest-human_sexuality|5": {
|
711 |
-
"hash_examples": "7acb8fdad97f88a6",
|
712 |
-
"hash_full_prompts": "731a52ff15b8cfdb",
|
713 |
-
"hash_input_tokens": "04269e5c5a257dd9",
|
714 |
-
"hash_cont_tokens": "bc54813e809b796d"
|
715 |
-
},
|
716 |
-
"harness|hendrycksTest-international_law|5": {
|
717 |
-
"hash_examples": "1300bfd0dfc59114",
|
718 |
-
"hash_full_prompts": "db2aefbff5eec996",
|
719 |
-
"hash_input_tokens": "d93ba9d9d38e4397",
|
720 |
-
"hash_cont_tokens": "dc45b45fcda18e5d"
|
721 |
-
},
|
722 |
-
"harness|hendrycksTest-jurisprudence|5": {
|
723 |
-
"hash_examples": "083b1e4904c48dc2",
|
724 |
-
"hash_full_prompts": "0f89ee3fe03d6a21",
|
725 |
-
"hash_input_tokens": "9eeaccd2698b4f5a",
|
726 |
-
"hash_cont_tokens": "e3a8cd951b6e3469"
|
727 |
-
},
|
728 |
-
"harness|hendrycksTest-logical_fallacies|5": {
|
729 |
-
"hash_examples": "709128f9926a634c",
|
730 |
-
"hash_full_prompts": "98a04b1f8f841069",
|
731 |
-
"hash_input_tokens": "b4f08f544f2b7576",
|
732 |
-
"hash_cont_tokens": "1e80dbd30f6453d5"
|
733 |
-
},
|
734 |
-
"harness|hendrycksTest-machine_learning|5": {
|
735 |
-
"hash_examples": "88f22a636029ae47",
|
736 |
-
"hash_full_prompts": "2e1c8d4b1e0cc921",
|
737 |
-
"hash_input_tokens": "900c2a51f1174b9f",
|
738 |
-
"hash_cont_tokens": "9b37da7777378ca9"
|
739 |
-
},
|
740 |
-
"harness|hendrycksTest-management|5": {
|
741 |
-
"hash_examples": "8c8a1e07a2151dca",
|
742 |
-
"hash_full_prompts": "f51611f514b265b0",
|
743 |
-
"hash_input_tokens": "6b36efb4689c6eca",
|
744 |
-
"hash_cont_tokens": "a01d6d39a83c4597"
|
745 |
-
},
|
746 |
-
"harness|hendrycksTest-marketing|5": {
|
747 |
-
"hash_examples": "2668953431f91e96",
|
748 |
-
"hash_full_prompts": "77562bef997c7650",
|
749 |
-
"hash_input_tokens": "2aaac78a0cfed47a",
|
750 |
-
"hash_cont_tokens": "6aeaed4d823c98aa"
|
751 |
-
},
|
752 |
-
"harness|hendrycksTest-medical_genetics|5": {
|
753 |
-
"hash_examples": "9c2dda34a2ea4fd2",
|
754 |
-
"hash_full_prompts": "202139046daa118f",
|
755 |
-
"hash_input_tokens": "886ca823b41c094a",
|
756 |
-
"hash_cont_tokens": "50421e30bef398f9"
|
757 |
-
},
|
758 |
-
"harness|hendrycksTest-miscellaneous|5": {
|
759 |
-
"hash_examples": "41adb694024809c2",
|
760 |
-
"hash_full_prompts": "bffec9fc237bcf93",
|
761 |
-
"hash_input_tokens": "72fd71de7675e7d0",
|
762 |
-
"hash_cont_tokens": "9b0ab02a64603081"
|
763 |
-
},
|
764 |
-
"harness|hendrycksTest-moral_disputes|5": {
|
765 |
-
"hash_examples": "3171c13ba3c594c4",
|
766 |
-
"hash_full_prompts": "170831fc36f1d59e",
|
767 |
-
"hash_input_tokens": "f3ca0dd8e7a1eb09",
|
768 |
-
"hash_cont_tokens": "8badf768f7b0467a"
|
769 |
-
},
|
770 |
-
"harness|hendrycksTest-moral_scenarios|5": {
|
771 |
-
"hash_examples": "9873e077e83e0546",
|
772 |
-
"hash_full_prompts": "08f4ceba3131a068",
|
773 |
-
"hash_input_tokens": "3e793631e951f23c",
|
774 |
-
"hash_cont_tokens": "32ae620376b2bbba"
|
775 |
-
},
|
776 |
-
"harness|hendrycksTest-nutrition|5": {
|
777 |
-
"hash_examples": "7db1d8142ec14323",
|
778 |
-
"hash_full_prompts": "4c0e68e3586cb453",
|
779 |
-
"hash_input_tokens": "59753c2144ea93af",
|
780 |
-
"hash_cont_tokens": "3071def75bacc404"
|
781 |
-
},
|
782 |
-
"harness|hendrycksTest-philosophy|5": {
|
783 |
-
"hash_examples": "9b455b7d72811cc8",
|
784 |
-
"hash_full_prompts": "e467f822d8a0d3ff",
|
785 |
-
"hash_input_tokens": "bd8d3dbed15a8c34",
|
786 |
-
"hash_cont_tokens": "9f6ff69d23a48783"
|
787 |
-
},
|
788 |
-
"harness|hendrycksTest-prehistory|5": {
|
789 |
-
"hash_examples": "8be90d0f538f1560",
|
790 |
-
"hash_full_prompts": "152187949bcd0921",
|
791 |
-
"hash_input_tokens": "3573cd87facbb7c5",
|
792 |
-
"hash_cont_tokens": "de469d2b981e32a3"
|
793 |
-
},
|
794 |
-
"harness|hendrycksTest-professional_accounting|5": {
|
795 |
-
"hash_examples": "8d377597916cd07e",
|
796 |
-
"hash_full_prompts": "0eb7345d6144ee0d",
|
797 |
-
"hash_input_tokens": "17e721bc1a7cbb47",
|
798 |
-
"hash_cont_tokens": "c46f74d2dfc7b13b"
|
799 |
-
},
|
800 |
-
"harness|hendrycksTest-professional_law|5": {
|
801 |
-
"hash_examples": "cd9dbc52b3c932d6",
|
802 |
-
"hash_full_prompts": "36ac764272bfb182",
|
803 |
-
"hash_input_tokens": "9178e10bd0763ec4",
|
804 |
-
"hash_cont_tokens": "2e590029ef41fbcd"
|
805 |
-
},
|
806 |
-
"harness|hendrycksTest-professional_medicine|5": {
|
807 |
-
"hash_examples": "b20e4e816c1e383e",
|
808 |
-
"hash_full_prompts": "7b8d69ea2acaf2f7",
|
809 |
-
"hash_input_tokens": "f5a22012a54f70ea",
|
810 |
-
"hash_cont_tokens": "fe35cfa9c6ca802e"
|
811 |
-
},
|
812 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
813 |
-
"hash_examples": "d45b73b22f9cc039",
|
814 |
-
"hash_full_prompts": "fe8937e9ffc99771",
|
815 |
-
"hash_input_tokens": "0dfb73a8eb3f692c",
|
816 |
-
"hash_cont_tokens": "f020fbddf72c8652"
|
817 |
-
},
|
818 |
-
"harness|hendrycksTest-public_relations|5": {
|
819 |
-
"hash_examples": "0d25072e1761652a",
|
820 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
821 |
-
"hash_input_tokens": "1710c6ba4c9f3cbd",
|
822 |
-
"hash_cont_tokens": "568f585a259965c1"
|
823 |
-
},
|
824 |
-
"harness|hendrycksTest-security_studies|5": {
|
825 |
-
"hash_examples": "62bb8197e63d60d4",
|
826 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
827 |
-
"hash_input_tokens": "d49711415961ced7",
|
828 |
-
"hash_cont_tokens": "cc6fd7cccd64cd5d"
|
829 |
-
},
|
830 |
-
"harness|hendrycksTest-sociology|5": {
|
831 |
-
"hash_examples": "e7959df87dea8672",
|
832 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
833 |
-
"hash_input_tokens": "828999f7624cbe7e",
|
834 |
-
"hash_cont_tokens": "c3a3bdfd177eed5b"
|
835 |
-
},
|
836 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
837 |
-
"hash_examples": "4a56a01ddca44dca",
|
838 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
839 |
-
"hash_input_tokens": "42054621e718dbee",
|
840 |
-
"hash_cont_tokens": "2568d0e8e36fa959"
|
841 |
-
},
|
842 |
-
"harness|hendrycksTest-virology|5": {
|
843 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
844 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
845 |
-
"hash_input_tokens": "6c4f0aa4dc859c04",
|
846 |
-
"hash_cont_tokens": "926cf60b0891f374"
|
847 |
-
},
|
848 |
-
"harness|hendrycksTest-world_religions|5": {
|
849 |
-
"hash_examples": "3b29cfaf1a81c379",
|
850 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
851 |
-
"hash_input_tokens": "6c75d44e092ff24f",
|
852 |
-
"hash_cont_tokens": "c525a5de974c1ea3"
|
853 |
-
},
|
854 |
-
"harness|truthfulqa:mc|0": {
|
855 |
-
"hash_examples": "23176c0531c7b867",
|
856 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
857 |
-
"hash_input_tokens": "2738d7ed7075faa7",
|
858 |
-
"hash_cont_tokens": "c014154380b74b9e"
|
859 |
-
}
|
860 |
-
},
|
861 |
-
"config_general": {
|
862 |
-
"model_name": "upstage/llama-30b-instruct-2048",
|
863 |
-
"model_sha": "be44a37814a20e790063086703f570732597887a",
|
864 |
-
"model_dtype": "torch.float16",
|
865 |
-
"lighteval_sha": "43cff840721bd0214adb4e29236a5e2ca1813937",
|
866 |
-
"num_few_shot_default": 0,
|
867 |
-
"num_fewshot_seeds": 1,
|
868 |
-
"override_batch_size": 1,
|
869 |
-
"max_samples": null
|
870 |
-
}
|
871 |
-
}
upstage/llama-30b-instruct-2048/results_2023-10-19T00-52-48.467311.json
DELETED
@@ -1,107 +0,0 @@
{
"config_general": {
"model_name": "upstage/llama-30b-instruct-2048",
"model_sha": "9f246be5c6c08bf397ff7b42aa5fe91d011d9ace",
"model_size": "60.65 GB",
"model_dtype": "torch.float16",
"lighteval_sha": "0f318ecf002208468154899217b3ba7c6ae09374",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": ""
},
"results": {
"harness|drop|3": {
"em": 0.35539010067114096,
"em_stderr": 0.004901633098104223,
"f1": 0.44463611577181344,
"f1_stderr": 0.004655171488287754
},
"harness|gsm8k|5": {
"acc": 0.17816527672479152,
"acc_stderr": 0.01054013252754947
},
"harness|winogrande|5": {
"acc": 0.7955801104972375,
"acc_stderr": 0.011334090612597212
},
"all": {
"em": 0.35539010067114096,
"em_stderr": 0.004901633098104223,
"f1": 0.44463611577181344,
"f1_stderr": 0.004655171488287754,
"acc": 0.48687269361101454,
"acc_stderr": 0.010937111570073342
}
},
"versions": {
"harness|drop|3": 1,
"harness|gsm8k|5": 0,
"harness|winogrande|5": 0,
"all": 0
},
"config_tasks": {
"harness|drop": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|winogrande": "LM Harness task"
},
"summary_tasks": {
"harness|drop|3": {
"hashes": {
"hash_examples": "1d27416e8324e9a3",
"hash_full_prompts": "a5513ff9a741b385",
"hash_input_tokens": "61b608e0b5ceed76",
"hash_cont_tokens": "8be7de4b577cd0e4"
},
"truncated": 1263,
"non-truncated": 8273,
"padded": 0,
"non-padded": 9536,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "bda342e47b5099b2",
"hash_cont_tokens": "9a51f433d2492896"
},
"truncated": 0,
"non-truncated": 1319,
"padded": 0,
"non-padded": 1319,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "c0bedf98cb040854",
"hash_cont_tokens": "f08975ad6f2d5864"
},
"truncated": 0,
"non-truncated": 2534,
"padded": 2432,
"non-padded": 102,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "9b4d8993161e637d",
"hash_full_prompts": "08215e527b7e60a5",
"hash_input_tokens": "80afe720f936f8d2",
"hash_cont_tokens": "8da53431b9f546a1"
},
"total_evaluation_time_secondes": "67098.26278543472",
"truncated": 1263,
"non-truncated": 12126,
"padded": 2432,
"non-padded": 10957,
"num_truncated_few_shots": 0
}
}
upstage/llama-30b-instruct/results_2023-07-19T22-33-00.369415.json
DELETED
@@ -1,871 +0,0 @@
{
"results": {
"harness|arc:challenge|25": {
"acc": 0.6049488054607508,
"acc_stderr": 0.014285898292938167,
"acc_norm": 0.6245733788395904,
"acc_norm_stderr": 0.014150631435111728
},
"harness|hellaswag|10": {
"acc": 0.6722764389563832,
"acc_stderr": 0.004684241685200317,
"acc_norm": 0.8622784305915157,
"acc_norm_stderr": 0.0034390323355350397
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5111111111111111,
"acc_stderr": 0.04318275491977976,
"acc_norm": 0.5111111111111111,
"acc_norm_stderr": 0.04318275491977976
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6381578947368421,
"acc_stderr": 0.03910525752849726,
"acc_norm": 0.6381578947368421,
"acc_norm_stderr": 0.03910525752849726
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.5924528301886792,
"acc_stderr": 0.030242233800854494,
"acc_norm": 0.5924528301886792,
"acc_norm_stderr": 0.030242233800854494
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6111111111111112,
"acc_stderr": 0.04076663253918567,
"acc_norm": 0.6111111111111112,
"acc_norm_stderr": 0.04076663253918567
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.4508670520231214,
"acc_stderr": 0.037940126746970296,
"acc_norm": 0.4508670520231214,
"acc_norm_stderr": 0.037940126746970296
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.30392156862745096,
"acc_stderr": 0.045766654032077636,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.045766654032077636
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.67,
"acc_stderr": 0.04725815626252609,
"acc_norm": 0.67,
"acc_norm_stderr": 0.04725815626252609
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.48936170212765956,
"acc_stderr": 0.03267862331014063,
"acc_norm": 0.48936170212765956,
"acc_norm_stderr": 0.03267862331014063
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.39473684210526316,
"acc_stderr": 0.045981880578165414,
"acc_norm": 0.39473684210526316,
"acc_norm_stderr": 0.045981880578165414
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.4827586206896552,
"acc_stderr": 0.04164188720169377,
"acc_norm": 0.4827586206896552,
"acc_norm_stderr": 0.04164188720169377
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.36507936507936506,
"acc_stderr": 0.02479606060269995,
"acc_norm": 0.36507936507936506,
"acc_norm_stderr": 0.02479606060269995
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.36507936507936506,
"acc_stderr": 0.04306241259127153,
"acc_norm": 0.36507936507936506,
"acc_norm_stderr": 0.04306241259127153
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7096774193548387,
"acc_stderr": 0.02582210611941589,
"acc_norm": 0.7096774193548387,
"acc_norm_stderr": 0.02582210611941589
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.43349753694581283,
"acc_stderr": 0.03486731727419872,
"acc_norm": 0.43349753694581283,
"acc_norm_stderr": 0.03486731727419872
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7515151515151515,
"acc_stderr": 0.033744026441394036,
"acc_norm": 0.7515151515151515,
"acc_norm_stderr": 0.033744026441394036
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7828282828282829,
"acc_stderr": 0.029376616484945627,
"acc_norm": 0.7828282828282829,
"acc_norm_stderr": 0.029376616484945627
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8756476683937824,
"acc_stderr": 0.023814477086593566,
"acc_norm": 0.8756476683937824,
"acc_norm_stderr": 0.023814477086593566
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5666666666666667,
"acc_stderr": 0.025124653525885117,
"acc_norm": 0.5666666666666667,
"acc_norm_stderr": 0.025124653525885117
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.28888888888888886,
"acc_stderr": 0.027634907264178544,
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.027634907264178544
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5966386554621849,
"acc_stderr": 0.031866081214088314,
"acc_norm": 0.5966386554621849,
"acc_norm_stderr": 0.031866081214088314
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3973509933774834,
"acc_stderr": 0.039955240076816806,
"acc_norm": 0.3973509933774834,
"acc_norm_stderr": 0.039955240076816806
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7871559633027523,
"acc_stderr": 0.017549376389313694,
"acc_norm": 0.7871559633027523,
"acc_norm_stderr": 0.017549376389313694
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.03372343271653063,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.03372343271653063
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8186274509803921,
"acc_stderr": 0.027044621719474086,
"acc_norm": 0.8186274509803921,
"acc_norm_stderr": 0.027044621719474086
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8270042194092827,
"acc_stderr": 0.024621562866768434,
"acc_norm": 0.8270042194092827,
"acc_norm_stderr": 0.024621562866768434
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6591928251121076,
"acc_stderr": 0.031811497470553604,
"acc_norm": 0.6591928251121076,
"acc_norm_stderr": 0.031811497470553604
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6946564885496184,
"acc_stderr": 0.04039314978724561,
"acc_norm": 0.6946564885496184,
"acc_norm_stderr": 0.04039314978724561
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.768595041322314,
"acc_stderr": 0.03849856098794088,
"acc_norm": 0.768595041322314,
"acc_norm_stderr": 0.03849856098794088
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7314814814814815,
"acc_stderr": 0.042844679680521934,
"acc_norm": 0.7314814814814815,
"acc_norm_stderr": 0.042844679680521934
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7116564417177914,
"acc_stderr": 0.03559039531617342,
"acc_norm": 0.7116564417177914,
"acc_norm_stderr": 0.03559039531617342
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.04718471485219588,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.04718471485219588
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.02280138253459753,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.02280138253459753
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695237
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7841634738186463,
"acc_stderr": 0.014711684386139944,
"acc_norm": 0.7841634738186463,
"acc_norm_stderr": 0.014711684386139944
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.02541600377316555,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.02541600377316555
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4547486033519553,
"acc_stderr": 0.016653875777524012,
"acc_norm": 0.4547486033519553,
"acc_norm_stderr": 0.016653875777524012
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6176470588235294,
"acc_stderr": 0.027826109307283693,
"acc_norm": 0.6176470588235294,
"acc_norm_stderr": 0.027826109307283693
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6913183279742765,
"acc_stderr": 0.02623696588115327,
"acc_norm": 0.6913183279742765,
"acc_norm_stderr": 0.02623696588115327
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7191358024691358,
"acc_stderr": 0.025006469755799208,
"acc_norm": 0.7191358024691358,
"acc_norm_stderr": 0.025006469755799208
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4219858156028369,
"acc_stderr": 0.02946218923337059,
"acc_norm": 0.4219858156028369,
"acc_norm_stderr": 0.02946218923337059
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4439374185136897,
"acc_stderr": 0.012689708167787682,
"acc_norm": 0.4439374185136897,
"acc_norm_stderr": 0.012689708167787682
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5477941176470589,
"acc_stderr": 0.030233758551596438,
"acc_norm": 0.5477941176470589,
"acc_norm_stderr": 0.030233758551596438
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6405228758169934,
"acc_stderr": 0.01941253924203216,
"acc_norm": 0.6405228758169934,
"acc_norm_stderr": 0.01941253924203216
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6653061224489796,
"acc_stderr": 0.030209235226242307,
"acc_norm": 0.6653061224489796,
"acc_norm_stderr": 0.030209235226242307
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8059701492537313,
"acc_stderr": 0.027962677604768907,
"acc_norm": 0.8059701492537313,
"acc_norm_stderr": 0.027962677604768907
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774708,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774708
},
"harness|hendrycksTest-virology|5": {
"acc": 0.4939759036144578,
"acc_stderr": 0.03892212195333045,
"acc_norm": 0.4939759036144578,
"acc_norm_stderr": 0.03892212195333045
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7719298245614035,
"acc_stderr": 0.03218093795602357,
"acc_norm": 0.7719298245614035,
"acc_norm_stderr": 0.03218093795602357
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3525091799265606,
"mc1_stderr": 0.016724646380756547,
"mc2": 0.5277707567124615,
"mc2_stderr": 0.015197380721177428
},
"all": {
"acc": 0.5952012613403,
"acc_stderr": 0.03377825264028914,
"acc_norm": 0.5987542539676555,
"acc_norm_stderr": 0.03375485473846725,
"mc1": 0.3525091799265606,
"mc1_stderr": 0.016724646380756547,
"mc2": 0.5277707567124615,
"mc2_stderr": 0.015197380721177428
}
},
"versions": {
"harness|arc:challenge|25": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"all": 0
},
"task_config": {
"harness|arc:challenge": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task"
},
"hashes": {
"harness|arc:challenge|25": {
"hash_examples": "fb8c51b1872daeda",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "61571bf68d6d89aa",
"hash_cont_tokens": "8210decc6ff6f7df"
},
"harness|hellaswag|10": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "29906669b1c7054a",
"hash_cont_tokens": "b3b9e9017afa63af"
},
"harness|hendrycksTest-abstract_algebra|5": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "c54ff61ad0273dd7",
"hash_cont_tokens": "50421e30bef398f9"
},
"harness|hendrycksTest-anatomy|5": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "be31a1e22aef5f90",
"hash_cont_tokens": "f11971a765cb609f"
},
"harness|hendrycksTest-astronomy|5": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "277a7b1fad566940",
"hash_cont_tokens": "bf30e5d3f48250cb"
},
"harness|hendrycksTest-business_ethics|5": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "ba552605bc116de5",
"hash_cont_tokens": "bc1dd9b2d995eb61"
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "428c7563d0b98ab9",
"hash_cont_tokens": "890a119624b3b935"
},
"harness|hendrycksTest-college_biology|5": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "da036601573942e2",
"hash_cont_tokens": "875cde3af7a0ee14"
},
"harness|hendrycksTest-college_chemistry|5": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "94e0196d6aded13d",
"hash_cont_tokens": "50421e30bef398f9"
},
"harness|hendrycksTest-college_computer_science|5": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "6e4d0f4a8d36690b",
"hash_cont_tokens": "ffc0fe414cdc4a83"
},
"harness|hendrycksTest-college_mathematics|5": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "614054d17109a25d",
"hash_cont_tokens": "50421e30bef398f9"
},
"harness|hendrycksTest-college_medicine|5": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "1d633b3cc0524ba8",
"hash_cont_tokens": "1f88b00d41957d82"
},
"harness|hendrycksTest-college_physics|5": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "5421d9a1af86cbd4",
"hash_cont_tokens": "f7b8097afc16a47c"
},
"harness|hendrycksTest-computer_security|5": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "5e6b70ecb333cf18",
"hash_cont_tokens": "50421e30bef398f9"
},
"harness|hendrycksTest-conceptual_physics|5": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "c2ef11a87264ceed",
"hash_cont_tokens": "aa0e8bc655f2f641"
},
"harness|hendrycksTest-econometrics|5": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "ecaccd912a4c3978",
"hash_cont_tokens": "bfb7e3c3c88313f1"
},
"harness|hendrycksTest-electrical_engineering|5": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "1590c84291399be8",
"hash_cont_tokens": "2425a3f084a591ef"
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "3269597f715b0da1",
"hash_cont_tokens": "f52691aef15a407b"
},
"harness|hendrycksTest-formal_logic|5": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "a2800d20f3ab8d7c",
"hash_cont_tokens": "f515d598d9c21263"
},
"harness|hendrycksTest-global_facts|5": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "94ed44b3772505ad",
"hash_cont_tokens": "50421e30bef398f9"
},
"harness|hendrycksTest-high_school_biology|5": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "24423acb928db768",
"hash_cont_tokens": "bd85a4156a3613ee"
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "831ff35c474e5cef",
"hash_cont_tokens": "a95c97af1c14e068"
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "8c34e0f2bda77358",
"hash_cont_tokens": "8abfedef914e33c9"
},
"harness|hendrycksTest-high_school_european_history|5": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "f1f73dd687da18d7",
"hash_cont_tokens": "674fc454bdc5ac93"
},
"harness|hendrycksTest-high_school_geography|5": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "7c5547c7da5bc793",
"hash_cont_tokens": "03a5012b916274ea"
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "f62991cb6a496b05",
"hash_cont_tokens": "a83effb8f76b7d7c"
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "4cef2aff6e3d59ed",
"hash_cont_tokens": "c583432ad27fcfe0"
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "6e2577ea4082ed2b",
"hash_cont_tokens": "24f5dc613660300b"
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "c5fc9aeb1079c8e4",
"hash_cont_tokens": "f47f041de50333b9"
},
"harness|hendrycksTest-high_school_physics|5": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "555fc385cffa84ca",
"hash_cont_tokens": "ba2efcd283e938cc"
},
"harness|hendrycksTest-high_school_psychology|5": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "febd23cbf9973b7f",
"hash_cont_tokens": "942069cd363844d9"
},
"harness|hendrycksTest-high_school_statistics|5": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "424b02981230ee83",
"hash_cont_tokens": "955ed42b6f7fa019"
},
"harness|hendrycksTest-high_school_us_history|5": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "50c9ff438c85a69e",
"hash_cont_tokens": "cdd0b3dc06d933e5"
},
"harness|hendrycksTest-high_school_world_history|5": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "054824cc474caef5",
"hash_cont_tokens": "9a864184946033ac"
},
"harness|hendrycksTest-human_aging|5": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "541a75f071dcf579",
"hash_cont_tokens": "142a4a8a1138a214"
},
"harness|hendrycksTest-human_sexuality|5": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "04269e5c5a257dd9",
"hash_cont_tokens": "bc54813e809b796d"
},
"harness|hendrycksTest-international_law|5": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "d93ba9d9d38e4397",
"hash_cont_tokens": "dc45b45fcda18e5d"
},
"harness|hendrycksTest-jurisprudence|5": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "9eeaccd2698b4f5a",
"hash_cont_tokens": "e3a8cd951b6e3469"
},
"harness|hendrycksTest-logical_fallacies|5": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "b4f08f544f2b7576",
"hash_cont_tokens": "1e80dbd30f6453d5"
},
"harness|hendrycksTest-machine_learning|5": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "900c2a51f1174b9f",
"hash_cont_tokens": "9b37da7777378ca9"
},
"harness|hendrycksTest-management|5": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "6b36efb4689c6eca",
"hash_cont_tokens": "a01d6d39a83c4597"
},
"harness|hendrycksTest-marketing|5": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "2aaac78a0cfed47a",
"hash_cont_tokens": "6aeaed4d823c98aa"
},
"harness|hendrycksTest-medical_genetics|5": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "886ca823b41c094a",
"hash_cont_tokens": "50421e30bef398f9"
},
"harness|hendrycksTest-miscellaneous|5": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "72fd71de7675e7d0",
"hash_cont_tokens": "9b0ab02a64603081"
},
"harness|hendrycksTest-moral_disputes|5": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "f3ca0dd8e7a1eb09",
"hash_cont_tokens": "8badf768f7b0467a"
},
"harness|hendrycksTest-moral_scenarios|5": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "3e793631e951f23c",
"hash_cont_tokens": "32ae620376b2bbba"
},
"harness|hendrycksTest-nutrition|5": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "59753c2144ea93af",
"hash_cont_tokens": "3071def75bacc404"
},
"harness|hendrycksTest-philosophy|5": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "bd8d3dbed15a8c34",
"hash_cont_tokens": "9f6ff69d23a48783"
},
"harness|hendrycksTest-prehistory|5": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "3573cd87facbb7c5",
"hash_cont_tokens": "de469d2b981e32a3"
},
"harness|hendrycksTest-professional_accounting|5": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "17e721bc1a7cbb47",
"hash_cont_tokens": "c46f74d2dfc7b13b"
},
"harness|hendrycksTest-professional_law|5": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "9178e10bd0763ec4",
"hash_cont_tokens": "2e590029ef41fbcd"
},
"harness|hendrycksTest-professional_medicine|5": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "f5a22012a54f70ea",
"hash_cont_tokens": "fe35cfa9c6ca802e"
},
"harness|hendrycksTest-professional_psychology|5": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "0dfb73a8eb3f692c",
"hash_cont_tokens": "f020fbddf72c8652"
},
"harness|hendrycksTest-public_relations|5": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "1710c6ba4c9f3cbd",
"hash_cont_tokens": "568f585a259965c1"
},
"harness|hendrycksTest-security_studies|5": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "d49711415961ced7",
"hash_cont_tokens": "cc6fd7cccd64cd5d"
},
"harness|hendrycksTest-sociology|5": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "828999f7624cbe7e",
"hash_cont_tokens": "c3a3bdfd177eed5b"
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "42054621e718dbee",
"hash_cont_tokens": "2568d0e8e36fa959"
},
"harness|hendrycksTest-virology|5": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "6c4f0aa4dc859c04",
"hash_cont_tokens": "926cf60b0891f374"
},
"harness|hendrycksTest-world_religions|5": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "6c75d44e092ff24f",
|
852 |
-
"hash_cont_tokens": "c525a5de974c1ea3"
|
853 |
-
},
|
854 |
-
"harness|truthfulqa:mc|0": {
|
855 |
-
"hash_examples": "23176c0531c7b867",
|
856 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
857 |
-
"hash_input_tokens": "2738d7ed7075faa7",
|
858 |
-
"hash_cont_tokens": "c014154380b74b9e"
|
859 |
-
}
|
860 |
-
},
|
861 |
-
"config_general": {
|
862 |
-
"model_name": "upstage/llama-30b-instruct",
|
863 |
-
"model_sha": "fea4312379557e8a1e8073965f560798de369edd",
|
864 |
-
"model_dtype": "torch.float16",
|
865 |
-
"lighteval_sha": "43cff840721bd0214adb4e29236a5e2ca1813937",
|
866 |
-
"num_few_shot_default": 0,
|
867 |
-
"num_fewshot_seeds": 1,
|
868 |
-
"override_batch_size": 1,
|
869 |
-
"max_samples": null
|
870 |
-
}
|
871 |
-
}
|
upstage/llama-30b-instruct/results_2023-09-17T15-33-08.826830.json
DELETED
@@ -1,107 +0,0 @@
{
    "config_general": {
        "model_name": "upstage/llama-30b-instruct",
        "model_sha": "7fa861f5779f342ce30a317372b34c69681d717f",
        "model_size": "60.65 GB",
        "model_dtype": "torch.float16",
        "lighteval_sha": "0f318ecf002208468154899217b3ba7c6ae09374",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": ""
    },
    "results": {
        "harness|drop|3": {
            "em": 0.19924496644295303,
            "em_stderr": 0.004090563786479079,
            "f1": 0.2739314177852351,
            "f1_stderr": 0.004108459298679424
        },
        "harness|gsm8k|5": {
            "acc": 0.12130401819560273,
            "acc_stderr": 0.0089928884972756
        },
        "harness|winogrande|5": {
            "acc": 0.8050513022888713,
            "acc_stderr": 0.011134099415938278
        },
        "all": {
            "em": 0.19924496644295303,
            "em_stderr": 0.004090563786479079,
            "f1": 0.2739314177852351,
            "f1_stderr": 0.004108459298679424,
            "acc": 0.46317766024223705,
            "acc_stderr": 0.01006349395660694
        }
    },
    "versions": {
        "harness|drop|3": 1,
        "harness|gsm8k|5": 0,
        "harness|winogrande|5": 0,
        "all": 0
    },
    "config_tasks": {
        "harness|drop": "LM Harness task",
        "harness|gsm8k": "LM Harness task",
        "harness|winogrande": "LM Harness task"
    },
    "summary_tasks": {
        "harness|drop|3": {
            "hashes": {
                "hash_examples": "1d27416e8324e9a3",
                "hash_full_prompts": "a5513ff9a741b385",
                "hash_input_tokens": "61b608e0b5ceed76",
                "hash_cont_tokens": "e461525dad158cd7"
            },
            "truncated": 1263,
            "non-truncated": 8273,
            "padded": 0,
            "non-padded": 9536,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|gsm8k|5": {
            "hashes": {
                "hash_examples": "4c0843a5d99bcfdc",
                "hash_full_prompts": "41d55e83abc0e02d",
                "hash_input_tokens": "bda342e47b5099b2",
                "hash_cont_tokens": "51c02228bba5ddc8"
            },
            "truncated": 0,
            "non-truncated": 1319,
            "padded": 0,
            "non-padded": 1319,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|winogrande|5": {
            "hashes": {
                "hash_examples": "aada0a176fd81218",
                "hash_full_prompts": "c8655cbd12de8409",
                "hash_input_tokens": "c0bedf98cb040854",
                "hash_cont_tokens": "f08975ad6f2d5864"
            },
            "truncated": 0,
            "non-truncated": 2534,
            "padded": 2432,
            "non-padded": 102,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "9b4d8993161e637d",
            "hash_full_prompts": "08215e527b7e60a5",
            "hash_input_tokens": "80afe720f936f8d2",
            "hash_cont_tokens": "31f91ae43a1f0a7d"
        },
        "total_evaluation_time_secondes": "80444.1055970192",
        "truncated": 1263,
        "non-truncated": 12126,
        "padded": 2432,
        "non-padded": 10957,
        "num_truncated_few_shots": 0
    }
}
upstage/llama-65b-instruct/results_2023-07-31T16-32-35.958499.json
DELETED
@@ -1,1365 +0,0 @@
{
    "results": {
        "harness|arc:challenge|25": {
            "acc": 0.6527303754266212,
            "acc_stderr": 0.013913034529620446,
            "acc_norm": 0.6885665529010239,
            "acc_norm_stderr": 0.01353247209985094
        },
        "harness|hellaswag|10": {
            "acc": 0.6733718382792272,
            "acc_stderr": 0.004680215003395924,
            "acc_norm": 0.8642700657239594,
            "acc_norm_stderr": 0.003418015843918836
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|hendrycksTest-anatomy|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.04292596718256981,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.04292596718256981
        },
        "harness|hendrycksTest-astronomy|5": {
            "acc": 0.743421052631579,
            "acc_stderr": 0.0355418036802569,
            "acc_norm": 0.743421052631579,
            "acc_norm_stderr": 0.0355418036802569
        },
        "harness|hendrycksTest-business_ethics|5": {
            "acc": 0.68,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.68,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "acc": 0.6981132075471698,
            "acc_stderr": 0.02825420034443866,
            "acc_norm": 0.6981132075471698,
            "acc_norm_stderr": 0.02825420034443866
        },
        "harness|hendrycksTest-college_biology|5": {
            "acc": 0.7291666666666666,
            "acc_stderr": 0.03716177437566017,
            "acc_norm": 0.7291666666666666,
            "acc_norm_stderr": 0.03716177437566017
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "acc": 0.47,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "acc": 0.47,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|hendrycksTest-college_medicine|5": {
            "acc": 0.5606936416184971,
            "acc_stderr": 0.037842719328874674,
            "acc_norm": 0.5606936416184971,
            "acc_norm_stderr": 0.037842719328874674
        },
        "harness|hendrycksTest-college_physics|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.049512182523962625,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.049512182523962625
        },
        "harness|hendrycksTest-computer_security|5": {
            "acc": 0.74,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "acc": 0.625531914893617,
            "acc_stderr": 0.03163910665367291,
            "acc_norm": 0.625531914893617,
            "acc_norm_stderr": 0.03163910665367291
        },
        "harness|hendrycksTest-econometrics|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.04537815354939392,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.04537815354939392
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "acc": 0.5793103448275863,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.5793103448275863,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "acc": 0.42063492063492064,
            "acc_stderr": 0.025424835086923992,
            "acc_norm": 0.42063492063492064,
            "acc_norm_stderr": 0.025424835086923992
        },
        "harness|hendrycksTest-formal_logic|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.044444444444444495,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.044444444444444495
        },
        "harness|hendrycksTest-global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "acc": 0.7677419354838709,
            "acc_stderr": 0.02402225613030823,
            "acc_norm": 0.7677419354838709,
            "acc_norm_stderr": 0.02402225613030823
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "acc": 0.41379310344827586,
            "acc_stderr": 0.03465304488406795,
            "acc_norm": 0.41379310344827586,
            "acc_norm_stderr": 0.03465304488406795
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "acc": 0.71,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "acc": 0.8,
            "acc_stderr": 0.031234752377721175,
            "acc_norm": 0.8,
            "acc_norm_stderr": 0.031234752377721175
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "acc": 0.8434343434343434,
            "acc_stderr": 0.025890520358141454,
            "acc_norm": 0.8434343434343434,
            "acc_norm_stderr": 0.025890520358141454
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "acc": 0.9015544041450777,
            "acc_stderr": 0.021500249576033477,
            "acc_norm": 0.9015544041450777,
            "acc_norm_stderr": 0.021500249576033477
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "acc": 0.6538461538461539,
            "acc_stderr": 0.024121125416941183,
            "acc_norm": 0.6538461538461539,
            "acc_norm_stderr": 0.024121125416941183
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.02784081149587194,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.02784081149587194
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "acc": 0.7058823529411765,
            "acc_stderr": 0.02959732973097811,
            "acc_norm": 0.7058823529411765,
            "acc_norm_stderr": 0.02959732973097811
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "acc": 0.41721854304635764,
            "acc_stderr": 0.04026141497634611,
            "acc_norm": 0.41721854304635764,
            "acc_norm_stderr": 0.04026141497634611
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "acc": 0.8275229357798165,
            "acc_stderr": 0.01619780795684803,
            "acc_norm": 0.8275229357798165,
            "acc_norm_stderr": 0.01619780795684803
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "acc": 0.49537037037037035,
            "acc_stderr": 0.03409825519163572,
            "acc_norm": 0.49537037037037035,
            "acc_norm_stderr": 0.03409825519163572
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "acc": 0.8480392156862745,
            "acc_stderr": 0.025195658428931796,
            "acc_norm": 0.8480392156862745,
            "acc_norm_stderr": 0.025195658428931796
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "acc": 0.8565400843881856,
            "acc_stderr": 0.022818291821017012,
            "acc_norm": 0.8565400843881856,
            "acc_norm_stderr": 0.022818291821017012
        },
        "harness|hendrycksTest-human_aging|5": {
            "acc": 0.6995515695067265,
            "acc_stderr": 0.03076935200822915,
            "acc_norm": 0.6995515695067265,
            "acc_norm_stderr": 0.03076935200822915
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "acc": 0.7633587786259542,
            "acc_stderr": 0.03727673575596914,
            "acc_norm": 0.7633587786259542,
            "acc_norm_stderr": 0.03727673575596914
        },
        "harness|hendrycksTest-international_law|5": {
            "acc": 0.8347107438016529,
            "acc_stderr": 0.03390780612972776,
            "acc_norm": 0.8347107438016529,
            "acc_norm_stderr": 0.03390780612972776
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "acc": 0.7962962962962963,
            "acc_stderr": 0.03893542518824847,
            "acc_norm": 0.7962962962962963,
            "acc_norm_stderr": 0.03893542518824847
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "acc": 0.7607361963190185,
            "acc_stderr": 0.033519538795212696,
            "acc_norm": 0.7607361963190185,
            "acc_norm_stderr": 0.033519538795212696
        },
        "harness|hendrycksTest-machine_learning|5": {
            "acc": 0.48214285714285715,
            "acc_stderr": 0.047427623612430116,
            "acc_norm": 0.48214285714285715,
            "acc_norm_stderr": 0.047427623612430116
        },
        "harness|hendrycksTest-management|5": {
            "acc": 0.8252427184466019,
            "acc_stderr": 0.03760178006026621,
            "acc_norm": 0.8252427184466019,
            "acc_norm_stderr": 0.03760178006026621
        },
        "harness|hendrycksTest-marketing|5": {
            "acc": 0.8803418803418803,
            "acc_stderr": 0.02126271940040697,
            "acc_norm": 0.8803418803418803,
            "acc_norm_stderr": 0.02126271940040697
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "acc": 0.68,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.68,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "acc": 0.8301404853128991,
            "acc_stderr": 0.013428186370608294,
            "acc_norm": 0.8301404853128991,
            "acc_norm_stderr": 0.013428186370608294
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "acc": 0.7514450867052023,
            "acc_stderr": 0.02326752843210017,
            "acc_norm": 0.7514450867052023,
            "acc_norm_stderr": 0.02326752843210017
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "acc": 0.49050279329608937,
            "acc_stderr": 0.016719484643348752,
            "acc_norm": 0.49050279329608937,
            "acc_norm_stderr": 0.016719484643348752
        },
        "harness|hendrycksTest-nutrition|5": {
            "acc": 0.7124183006535948,
            "acc_stderr": 0.02591780611714716,
            "acc_norm": 0.7124183006535948,
            "acc_norm_stderr": 0.02591780611714716
        },
        "harness|hendrycksTest-philosophy|5": {
            "acc": 0.7202572347266881,
            "acc_stderr": 0.025494259350694905,
            "acc_norm": 0.7202572347266881,
            "acc_norm_stderr": 0.025494259350694905
        },
        "harness|hendrycksTest-prehistory|5": {
            "acc": 0.7407407407407407,
            "acc_stderr": 0.024383665531035454,
            "acc_norm": 0.7407407407407407,
            "acc_norm_stderr": 0.024383665531035454
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "acc": 0.5283687943262412,
            "acc_stderr": 0.029779450957303055,
            "acc_norm": 0.5283687943262412,
            "acc_norm_stderr": 0.029779450957303055
        },
        "harness|hendrycksTest-professional_law|5": {
            "acc": 0.49608865710560623,
            "acc_stderr": 0.012769845366441194,
            "acc_norm": 0.49608865710560623,
            "acc_norm_stderr": 0.012769845366441194
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "acc": 0.6139705882352942,
            "acc_stderr": 0.029573269134411124,
            "acc_norm": 0.6139705882352942,
            "acc_norm_stderr": 0.029573269134411124
        },
        "harness|hendrycksTest-professional_psychology|5": {
            "acc": 0.696078431372549,
            "acc_stderr": 0.01860755213127983,
            "acc_norm": 0.696078431372549,
            "acc_norm_stderr": 0.01860755213127983
        },
        "harness|hendrycksTest-public_relations|5": {
            "acc": 0.7363636363636363,
            "acc_stderr": 0.04220224692971987,
            "acc_norm": 0.7363636363636363,
            "acc_norm_stderr": 0.04220224692971987
        },
        "harness|hendrycksTest-security_studies|5": {
            "acc": 0.7795918367346939,
            "acc_stderr": 0.026537045312145277,
            "acc_norm": 0.7795918367346939,
            "acc_norm_stderr": 0.026537045312145277
        },
        "harness|hendrycksTest-sociology|5": {
            "acc": 0.8606965174129353,
            "acc_stderr": 0.02448448716291397,
            "acc_norm": 0.8606965174129353,
            "acc_norm_stderr": 0.02448448716291397
        },
        "harness|hendrycksTest-us_foreign_policy|5": {
            "acc": 0.82,
            "acc_stderr": 0.03861229196653694,
            "acc_norm": 0.82,
            "acc_norm_stderr": 0.03861229196653694
        },
        "harness|hendrycksTest-virology|5": {
            "acc": 0.536144578313253,
            "acc_stderr": 0.038823108508905954,
            "acc_norm": 0.536144578313253,
            "acc_norm_stderr": 0.038823108508905954
        },
        "harness|hendrycksTest-world_religions|5": {
            "acc": 0.8011695906432749,
            "acc_stderr": 0.030611116557432528,
            "acc_norm": 0.8011695906432749,
            "acc_norm_stderr": 0.030611116557432528
        },
        "harness|truthfulqa:mc|0": {
            "mc1": 0.42962056303549573,
            "mc1_stderr": 0.017329234580409095,
            "mc2": 0.5969914036089322,
            "mc2_stderr": 0.015215660759560816
        },
        "all": {
            "acc": 0.6482368382814275,
            "acc_stderr": 0.03259733412834221,
            "acc_norm": 0.6520797942970061,
            "acc_norm_stderr": 0.03256949071157532,
            "mc1": 0.42962056303549573,
            "mc1_stderr": 0.017329234580409095,
            "mc2": 0.5969914036089322,
            "mc2_stderr": 0.015215660759560816
        }
    },
    "versions": {
        "harness|arc:challenge|25": 0,
        "harness|hellaswag|10": 0,
        "harness|hendrycksTest-abstract_algebra|5": 1,
        "harness|hendrycksTest-anatomy|5": 1,
        "harness|hendrycksTest-astronomy|5": 1,
        "harness|hendrycksTest-business_ethics|5": 1,
        "harness|hendrycksTest-clinical_knowledge|5": 1,
        "harness|hendrycksTest-college_biology|5": 1,
        "harness|hendrycksTest-college_chemistry|5": 1,
        "harness|hendrycksTest-college_computer_science|5": 1,
        "harness|hendrycksTest-college_mathematics|5": 1,
        "harness|hendrycksTest-college_medicine|5": 1,
        "harness|hendrycksTest-college_physics|5": 1,
        "harness|hendrycksTest-computer_security|5": 1,
        "harness|hendrycksTest-conceptual_physics|5": 1,
        "harness|hendrycksTest-econometrics|5": 1,
        "harness|hendrycksTest-electrical_engineering|5": 1,
        "harness|hendrycksTest-elementary_mathematics|5": 1,
        "harness|hendrycksTest-formal_logic|5": 1,
        "harness|hendrycksTest-global_facts|5": 1,
        "harness|hendrycksTest-high_school_biology|5": 1,
        "harness|hendrycksTest-high_school_chemistry|5": 1,
        "harness|hendrycksTest-high_school_computer_science|5": 1,
        "harness|hendrycksTest-high_school_european_history|5": 1,
        "harness|hendrycksTest-high_school_geography|5": 1,
        "harness|hendrycksTest-high_school_government_and_politics|5": 1,
        "harness|hendrycksTest-high_school_macroeconomics|5": 1,
        "harness|hendrycksTest-high_school_mathematics|5": 1,
        "harness|hendrycksTest-high_school_microeconomics|5": 1,
        "harness|hendrycksTest-high_school_physics|5": 1,
        "harness|hendrycksTest-high_school_psychology|5": 1,
        "harness|hendrycksTest-high_school_statistics|5": 1,
        "harness|hendrycksTest-high_school_us_history|5": 1,
        "harness|hendrycksTest-high_school_world_history|5": 1,
        "harness|hendrycksTest-human_aging|5": 1,
        "harness|hendrycksTest-human_sexuality|5": 1,
        "harness|hendrycksTest-international_law|5": 1,
        "harness|hendrycksTest-jurisprudence|5": 1,
        "harness|hendrycksTest-logical_fallacies|5": 1,
        "harness|hendrycksTest-machine_learning|5": 1,
        "harness|hendrycksTest-management|5": 1,
        "harness|hendrycksTest-marketing|5": 1,
        "harness|hendrycksTest-medical_genetics|5": 1,
        "harness|hendrycksTest-miscellaneous|5": 1,
        "harness|hendrycksTest-moral_disputes|5": 1,
        "harness|hendrycksTest-moral_scenarios|5": 1,
        "harness|hendrycksTest-nutrition|5": 1,
        "harness|hendrycksTest-philosophy|5": 1,
        "harness|hendrycksTest-prehistory|5": 1,
        "harness|hendrycksTest-professional_accounting|5": 1,
        "harness|hendrycksTest-professional_law|5": 1,
        "harness|hendrycksTest-professional_medicine|5": 1,
        "harness|hendrycksTest-professional_psychology|5": 1,
        "harness|hendrycksTest-public_relations|5": 1,
        "harness|hendrycksTest-security_studies|5": 1,
        "harness|hendrycksTest-sociology|5": 1,
        "harness|hendrycksTest-us_foreign_policy|5": 1,
        "harness|hendrycksTest-virology|5": 1,
        "harness|hendrycksTest-world_religions|5": 1,
        "harness|truthfulqa:mc|0": 1,
        "all": 0
    },
    "config_general": {
        "model_name": "upstage/llama-65b-instruct",
        "model_sha": "b95668861dfb7b0abca44ccdbef2db49b2dd8917",
        "model_dtype": "torch.float16",
        "lighteval_sha": "03c2fad20ff7f5334c33cfee459024b8d7e4a109",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    },
    "config_tasks": {
        "harness|arc:challenge": "LM Harness task",
        "harness|hellaswag": "LM Harness task",
        "harness|hendrycksTest-abstract_algebra": "LM Harness task",
        "harness|hendrycksTest-anatomy": "LM Harness task",
        "harness|hendrycksTest-astronomy": "LM Harness task",
        "harness|hendrycksTest-business_ethics": "LM Harness task",
        "harness|hendrycksTest-clinical_knowledge": "LM Harness task",
        "harness|hendrycksTest-college_biology": "LM Harness task",
        "harness|hendrycksTest-college_chemistry": "LM Harness task",
        "harness|hendrycksTest-college_computer_science": "LM Harness task",
        "harness|hendrycksTest-college_mathematics": "LM Harness task",
        "harness|hendrycksTest-college_medicine": "LM Harness task",
        "harness|hendrycksTest-college_physics": "LM Harness task",
        "harness|hendrycksTest-computer_security": "LM Harness task",
        "harness|hendrycksTest-conceptual_physics": "LM Harness task",
        "harness|hendrycksTest-econometrics": "LM Harness task",
        "harness|hendrycksTest-electrical_engineering": "LM Harness task",
        "harness|hendrycksTest-elementary_mathematics": "LM Harness task",
        "harness|hendrycksTest-formal_logic": "LM Harness task",
        "harness|hendrycksTest-global_facts": "LM Harness task",
        "harness|hendrycksTest-high_school_biology": "LM Harness task",
        "harness|hendrycksTest-high_school_chemistry": "LM Harness task",
        "harness|hendrycksTest-high_school_computer_science": "LM Harness task",
        "harness|hendrycksTest-high_school_european_history": "LM Harness task",
        "harness|hendrycksTest-high_school_geography": "LM Harness task",
        "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
        "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_mathematics": "LM Harness task",
        "harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
        "harness|hendrycksTest-high_school_physics": "LM Harness task",
        "harness|hendrycksTest-high_school_psychology": "LM Harness task",
        "harness|hendrycksTest-high_school_statistics": "LM Harness task",
        "harness|hendrycksTest-high_school_us_history": "LM Harness task",
        "harness|hendrycksTest-high_school_world_history": "LM Harness task",
        "harness|hendrycksTest-human_aging": "LM Harness task",
        "harness|hendrycksTest-human_sexuality": "LM Harness task",
        "harness|hendrycksTest-international_law": "LM Harness task",
        "harness|hendrycksTest-jurisprudence": "LM Harness task",
        "harness|hendrycksTest-logical_fallacies": "LM Harness task",
        "harness|hendrycksTest-machine_learning": "LM Harness task",
        "harness|hendrycksTest-management": "LM Harness task",
        "harness|hendrycksTest-marketing": "LM Harness task",
        "harness|hendrycksTest-medical_genetics": "LM Harness task",
        "harness|hendrycksTest-miscellaneous": "LM Harness task",
        "harness|hendrycksTest-moral_disputes": "LM Harness task",
        "harness|hendrycksTest-moral_scenarios": "LM Harness task",
        "harness|hendrycksTest-nutrition": "LM Harness task",
        "harness|hendrycksTest-philosophy": "LM Harness task",
        "harness|hendrycksTest-prehistory": "LM Harness task",
        "harness|hendrycksTest-professional_accounting": "LM Harness task",
        "harness|hendrycksTest-professional_law": "LM Harness task",
        "harness|hendrycksTest-professional_medicine": "LM Harness task",
        "harness|hendrycksTest-professional_psychology": "LM Harness task",
        "harness|hendrycksTest-public_relations": "LM Harness task",
        "harness|hendrycksTest-security_studies": "LM Harness task",
        "harness|hendrycksTest-sociology": "LM Harness task",
        "harness|hendrycksTest-us_foreign_policy": "LM Harness task",
        "harness|hendrycksTest-virology": "LM Harness task",
        "harness|hendrycksTest-world_religions": "LM Harness task",
        "harness|truthfulqa:mc": "LM Harness task"
    },
    "summary_tasks": {
        "harness|arc:challenge|25": {
            "hashes": {
                "hash_examples": "17b0cae357c0259e",
                "hash_full_prompts": "045cbb916e5145c6",
                "hash_input_tokens": "61571bf68d6d89aa",
                "hash_cont_tokens": "ede2b335438f08e9"
            },
            "truncated": 0,
            "non-truncated": 4687,
            "padded": 4687,
            "non-padded": 0,
            "effective_few_shots": 25.0,
            "num_truncated_few_shots": 0
        },
        "harness|hellaswag|10": {
            "hashes": {
                "hash_examples": "e1768ecb99d7ecf0",
                "hash_full_prompts": "0b4c16983130f84f",
                "hash_input_tokens": "29906669b1c7054a",
                "hash_cont_tokens": "b41cf1ad182d68d5"
            },
            "truncated": 0,
            "non-truncated": 40168,
            "padded": 40113,
            "non-padded": 55,
            "effective_few_shots": 10.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-abstract_algebra|5": {
            "hashes": {
                "hash_examples": "280f9f325b40559a",
                "hash_full_prompts": "2f776a367d23aea2",
                "hash_input_tokens": "c54ff61ad0273dd7",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-anatomy|5": {
            "hashes": {
                "hash_examples": "2f83a4f1cab4ba18",
                "hash_full_prompts": "516f74bef25df620",
                "hash_input_tokens": "be31a1e22aef5f90",
                "hash_cont_tokens": "f11971a765cb609f"
            },
            "truncated": 0,
            "non-truncated": 540,
            "padded": 540,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-astronomy|5": {
            "hashes": {
                "hash_examples": "7d587b908da4d762",
                "hash_full_prompts": "faf4e80f65de93ca",
                "hash_input_tokens": "277a7b1fad566940",
                "hash_cont_tokens": "238bd86950544b29"
            },
            "truncated": 0,
            "non-truncated": 608,
            "padded": 608,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-business_ethics|5": {
            "hashes": {
                "hash_examples": "33e51740670de686",
                "hash_full_prompts": "db01c3ef8e1479d4",
                "hash_input_tokens": "ba552605bc116de5",
                "hash_cont_tokens": "f9d6d2a7d7e9a041"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-clinical_knowledge|5": {
            "hashes": {
                "hash_examples": "f3366dbe7eefffa4",
                "hash_full_prompts": "49654f71d94b65c3",
                "hash_input_tokens": "428c7563d0b98ab9",
                "hash_cont_tokens": "6af58623d0d5fbcd"
            },
            "truncated": 0,
            "non-truncated": 1060,
            "padded": 1060,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_biology|5": {
            "hashes": {
                "hash_examples": "ca2b6753a0193e7f",
                "hash_full_prompts": "2b460b75f1fdfefd",
                "hash_input_tokens": "da036601573942e2",
                "hash_cont_tokens": "875cde3af7a0ee14"
            },
            "truncated": 0,
            "non-truncated": 576,
            "padded": 576,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_chemistry|5": {
            "hashes": {
                "hash_examples": "22ff85f1d34f42d1",
                "hash_full_prompts": "242c9be6da583e95",
                "hash_input_tokens": "94e0196d6aded13d",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_computer_science|5": {
            "hashes": {
                "hash_examples": "30318289d717a5cf",
                "hash_full_prompts": "ed2bdb4e87c4b371",
                "hash_input_tokens": "6e4d0f4a8d36690b",
                "hash_cont_tokens": "1ba0c71186b1505e"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_mathematics|5": {
            "hashes": {
                "hash_examples": "4944d1f0b6b5d911",
                "hash_full_prompts": "770bc4281c973190",
                "hash_input_tokens": "614054d17109a25d",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_medicine|5": {
            "hashes": {
                "hash_examples": "dd69cc33381275af",
                "hash_full_prompts": "ad2a53e5250ab46e",
                "hash_input_tokens": "1d633b3cc0524ba8",
                "hash_cont_tokens": "702fb6d82ff0d6ac"
            },
            "truncated": 0,
            "non-truncated": 692,
            "padded": 692,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-college_physics|5": {
            "hashes": {
                "hash_examples": "875dd26d22655b0d",
                "hash_full_prompts": "833a0d7b55aed500",
                "hash_input_tokens": "5421d9a1af86cbd4",
                "hash_cont_tokens": "f7b8097afc16a47c"
            },
            "truncated": 0,
            "non-truncated": 408,
            "padded": 408,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-computer_security|5": {
            "hashes": {
                "hash_examples": "006451eedc0ededb",
                "hash_full_prompts": "94034c97e85d8f46",
                "hash_input_tokens": "5e6b70ecb333cf18",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-conceptual_physics|5": {
            "hashes": {
                "hash_examples": "8874ece872d2ca4c",
                "hash_full_prompts": "e40d15a34640d6fa",
                "hash_input_tokens": "c2ef11a87264ceed",
                "hash_cont_tokens": "aa0e8bc655f2f641"
            },
            "truncated": 0,
            "non-truncated": 940,
            "padded": 940,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-econometrics|5": {
            "hashes": {
                "hash_examples": "64d3623b0bfaa43f",
                "hash_full_prompts": "612f340fae41338d",
                "hash_input_tokens": "ecaccd912a4c3978",
                "hash_cont_tokens": "a9b1f761089f6acc"
            },
            "truncated": 0,
            "non-truncated": 456,
            "padded": 456,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-electrical_engineering|5": {
            "hashes": {
                "hash_examples": "e98f51780c674d7e",
                "hash_full_prompts": "10275b312d812ae6",
                "hash_input_tokens": "1590c84291399be8",
                "hash_cont_tokens": "2425a3f084a591ef"
            },
            "truncated": 0,
            "non-truncated": 580,
            "padded": 580,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-elementary_mathematics|5": {
            "hashes": {
                "hash_examples": "fc48208a5ac1c0ce",
                "hash_full_prompts": "5ec274c6c82aca23",
                "hash_input_tokens": "3269597f715b0da1",
                "hash_cont_tokens": "eb2d5002052b5bc5"
            },
            "truncated": 0,
            "non-truncated": 1512,
            "padded": 1512,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-formal_logic|5": {
            "hashes": {
                "hash_examples": "5a6525665f63ea72",
                "hash_full_prompts": "07b92638c4a6b500",
                "hash_input_tokens": "a2800d20f3ab8d7c",
                "hash_cont_tokens": "9b30dc19c9b62f60"
            },
            "truncated": 0,
            "non-truncated": 504,
            "padded": 504,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-global_facts|5": {
            "hashes": {
                "hash_examples": "371d70d743b2b89b",
                "hash_full_prompts": "332fdee50a1921b4",
                "hash_input_tokens": "94ed44b3772505ad",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_biology|5": {
            "hashes": {
                "hash_examples": "a79e1018b1674052",
                "hash_full_prompts": "e624e26ede922561",
                "hash_input_tokens": "24423acb928db768",
                "hash_cont_tokens": "74217a4e2868536f"
            },
            "truncated": 0,
            "non-truncated": 1240,
            "padded": 1240,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_chemistry|5": {
            "hashes": {
                "hash_examples": "44bfc25c389f0e03",
                "hash_full_prompts": "0e3e5f5d9246482a",
                "hash_input_tokens": "831ff35c474e5cef",
                "hash_cont_tokens": "bf39544be0ebf000"
            },
            "truncated": 0,
            "non-truncated": 812,
            "padded": 812,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_computer_science|5": {
            "hashes": {
                "hash_examples": "8b8cdb1084f24169",
                "hash_full_prompts": "c00487e67c1813cc",
                "hash_input_tokens": "8c34e0f2bda77358",
                "hash_cont_tokens": "43570b3948564b64"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_european_history|5": {
            "hashes": {
                "hash_examples": "11cd32d0ef440171",
                "hash_full_prompts": "318f4513c537c6bf",
                "hash_input_tokens": "f1f73dd687da18d7",
                "hash_cont_tokens": "674fc454bdc5ac93"
            },
            "truncated": 660,
            "non-truncated": 0,
            "padded": 0,
            "non-padded": 660,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_geography|5": {
            "hashes": {
                "hash_examples": "b60019b9e80b642f",
                "hash_full_prompts": "ee5789fcc1a81b1e",
                "hash_input_tokens": "7c5547c7da5bc793",
                "hash_cont_tokens": "03a5012b916274ea"
            },
            "truncated": 0,
            "non-truncated": 792,
            "padded": 792,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_government_and_politics|5": {
            "hashes": {
                "hash_examples": "d221ec983d143dc3",
                "hash_full_prompts": "ac42d888e1ce1155",
                "hash_input_tokens": "f62991cb6a496b05",
                "hash_cont_tokens": "50ab225c2f535210"
            },
            "truncated": 0,
            "non-truncated": 772,
            "padded": 772,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_macroeconomics|5": {
            "hashes": {
                "hash_examples": "59c2915cacfd3fbb",
                "hash_full_prompts": "c6bd9d25158abd0e",
                "hash_input_tokens": "4cef2aff6e3d59ed",
                "hash_cont_tokens": "c583432ad27fcfe0"
            },
            "truncated": 0,
            "non-truncated": 1560,
            "padded": 1560,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_mathematics|5": {
            "hashes": {
                "hash_examples": "1f8ac897608de342",
                "hash_full_prompts": "5d88f41fc2d643a8",
                "hash_input_tokens": "6e2577ea4082ed2b",
                "hash_cont_tokens": "1194078d4e38c984"
            },
            "truncated": 0,
            "non-truncated": 1080,
            "padded": 1080,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_microeconomics|5": {
            "hashes": {
                "hash_examples": "ead6a0f2f6c83370",
                "hash_full_prompts": "bfc393381298609e",
                "hash_input_tokens": "c5fc9aeb1079c8e4",
                "hash_cont_tokens": "f47f041de50333b9"
            },
            "truncated": 0,
            "non-truncated": 952,
            "padded": 952,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_physics|5": {
            "hashes": {
                "hash_examples": "c3f2025990afec64",
                "hash_full_prompts": "fc78b4997e436734",
                "hash_input_tokens": "555fc385cffa84ca",
                "hash_cont_tokens": "6296151cf7fee15c"
            },
            "truncated": 0,
            "non-truncated": 604,
            "padded": 604,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_psychology|5": {
            "hashes": {
                "hash_examples": "21f8aab618f6d636",
                "hash_full_prompts": "d5c76aa40b9dbc43",
                "hash_input_tokens": "febd23cbf9973b7f",
                "hash_cont_tokens": "a490d3db0ea5935a"
            },
            "truncated": 0,
            "non-truncated": 2180,
            "padded": 2180,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_statistics|5": {
            "hashes": {
                "hash_examples": "2386a60a11fc5de3",
                "hash_full_prompts": "4c5c8be5aafac432",
                "hash_input_tokens": "424b02981230ee83",
                "hash_cont_tokens": "6830ef7d0325d7ef"
            },
            "truncated": 0,
            "non-truncated": 864,
            "padded": 864,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_us_history|5": {
            "hashes": {
                "hash_examples": "74961543be40f04f",
                "hash_full_prompts": "5d5ca4840131ba21",
                "hash_input_tokens": "50c9ff438c85a69e",
                "hash_cont_tokens": "cdd0b3dc06d933e5"
            },
            "truncated": 816,
            "non-truncated": 0,
            "padded": 0,
            "non-padded": 816,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-high_school_world_history|5": {
            "hashes": {
                "hash_examples": "2ad2f6b7198b2234",
                "hash_full_prompts": "11845057459afd72",
                "hash_input_tokens": "054824cc474caef5",
                "hash_cont_tokens": "e0203e3fc1bb0500"
            },
            "truncated": 8,
            "non-truncated": 940,
            "padded": 940,
            "non-padded": 8,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-human_aging|5": {
            "hashes": {
                "hash_examples": "1a7199dc733e779b",
                "hash_full_prompts": "756b9096b8eaf892",
                "hash_input_tokens": "541a75f071dcf579",
                "hash_cont_tokens": "142a4a8a1138a214"
            },
            "truncated": 0,
            "non-truncated": 892,
            "padded": 892,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "hashes": {
                "hash_examples": "7acb8fdad97f88a6",
                "hash_full_prompts": "731a52ff15b8cfdb",
                "hash_input_tokens": "04269e5c5a257dd9",
                "hash_cont_tokens": "bc54813e809b796d"
            },
            "truncated": 0,
            "non-truncated": 524,
            "padded": 524,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-international_law|5": {
            "hashes": {
                "hash_examples": "1300bfd0dfc59114",
                "hash_full_prompts": "db2aefbff5eec996",
                "hash_input_tokens": "d93ba9d9d38e4397",
                "hash_cont_tokens": "63435df622d5437b"
            },
            "truncated": 0,
            "non-truncated": 484,
            "padded": 484,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "hashes": {
                "hash_examples": "083b1e4904c48dc2",
                "hash_full_prompts": "0f89ee3fe03d6a21",
                "hash_input_tokens": "9eeaccd2698b4f5a",
                "hash_cont_tokens": "e3a8cd951b6e3469"
            },
            "truncated": 0,
            "non-truncated": 432,
            "padded": 432,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "hashes": {
                "hash_examples": "709128f9926a634c",
                "hash_full_prompts": "98a04b1f8f841069",
                "hash_input_tokens": "b4f08f544f2b7576",
                "hash_cont_tokens": "5e6ee2ff0404f23c"
            },
            "truncated": 0,
            "non-truncated": 652,
            "padded": 648,
            "non-padded": 4,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-machine_learning|5": {
            "hashes": {
                "hash_examples": "88f22a636029ae47",
                "hash_full_prompts": "2e1c8d4b1e0cc921",
                "hash_input_tokens": "900c2a51f1174b9f",
                "hash_cont_tokens": "c81919424db3b267"
            },
            "truncated": 0,
            "non-truncated": 448,
            "padded": 448,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-management|5": {
            "hashes": {
                "hash_examples": "8c8a1e07a2151dca",
                "hash_full_prompts": "f51611f514b265b0",
                "hash_input_tokens": "6b36efb4689c6eca",
                "hash_cont_tokens": "a01d6d39a83c4597"
            },
            "truncated": 0,
            "non-truncated": 412,
            "padded": 412,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-marketing|5": {
            "hashes": {
                "hash_examples": "2668953431f91e96",
                "hash_full_prompts": "77562bef997c7650",
                "hash_input_tokens": "2aaac78a0cfed47a",
                "hash_cont_tokens": "6aeaed4d823c98aa"
            },
            "truncated": 0,
            "non-truncated": 936,
            "padded": 936,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "hashes": {
                "hash_examples": "9c2dda34a2ea4fd2",
                "hash_full_prompts": "202139046daa118f",
                "hash_input_tokens": "886ca823b41c094a",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "hashes": {
                "hash_examples": "41adb694024809c2",
                "hash_full_prompts": "bffec9fc237bcf93",
                "hash_input_tokens": "72fd71de7675e7d0",
                "hash_cont_tokens": "9b0ab02a64603081"
            },
            "truncated": 0,
            "non-truncated": 3132,
            "padded": 3132,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "hashes": {
                "hash_examples": "3171c13ba3c594c4",
                "hash_full_prompts": "170831fc36f1d59e",
                "hash_input_tokens": "f3ca0dd8e7a1eb09",
                "hash_cont_tokens": "3b8bbe9108e55ce9"
            },
            "truncated": 0,
            "non-truncated": 1384,
            "padded": 1354,
            "non-padded": 30,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "hashes": {
                "hash_examples": "9873e077e83e0546",
                "hash_full_prompts": "08f4ceba3131a068",
                "hash_input_tokens": "3e793631e951f23c",
                "hash_cont_tokens": "2eae753a177d5460"
            },
            "truncated": 0,
            "non-truncated": 3580,
            "padded": 3580,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-nutrition|5": {
            "hashes": {
                "hash_examples": "7db1d8142ec14323",
                "hash_full_prompts": "4c0e68e3586cb453",
                "hash_input_tokens": "59753c2144ea93af",
                "hash_cont_tokens": "29771089bd3c65c6"
            },
            "truncated": 0,
            "non-truncated": 1224,
            "padded": 1224,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-philosophy|5": {
            "hashes": {
                "hash_examples": "9b455b7d72811cc8",
                "hash_full_prompts": "e467f822d8a0d3ff",
                "hash_input_tokens": "bd8d3dbed15a8c34",
                "hash_cont_tokens": "9f6ff69d23a48783"
            },
            "truncated": 0,
            "non-truncated": 1244,
            "padded": 1244,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-prehistory|5": {
            "hashes": {
                "hash_examples": "8be90d0f538f1560",
                "hash_full_prompts": "152187949bcd0921",
                "hash_input_tokens": "3573cd87facbb7c5",
                "hash_cont_tokens": "a789a13af22308bf"
            },
            "truncated": 0,
            "non-truncated": 1296,
            "padded": 1296,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "hashes": {
                "hash_examples": "8d377597916cd07e",
                "hash_full_prompts": "0eb7345d6144ee0d",
                "hash_input_tokens": "17e721bc1a7cbb47",
                "hash_cont_tokens": "5129a9cfb30c5239"
            },
            "truncated": 0,
            "non-truncated": 1128,
            "padded": 1128,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_law|5": {
            "hashes": {
                "hash_examples": "cd9dbc52b3c932d6",
                "hash_full_prompts": "36ac764272bfb182",
                "hash_input_tokens": "9178e10bd0763ec4",
                "hash_cont_tokens": "2e590029ef41fbcd"
            },
            "truncated": 604,
            "non-truncated": 5532,
            "padded": 5524,
            "non-padded": 612,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "hashes": {
                "hash_examples": "b20e4e816c1e383e",
                "hash_full_prompts": "7b8d69ea2acaf2f7",
                "hash_input_tokens": "f5a22012a54f70ea",
                "hash_cont_tokens": "cd82e108370cece8"
            },
            "truncated": 0,
            "non-truncated": 1088,
            "padded": 1088,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
1238 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
1239 |
-
"hashes": {
|
1240 |
-
"hash_examples": "d45b73b22f9cc039",
|
1241 |
-
"hash_full_prompts": "fe8937e9ffc99771",
|
1242 |
-
"hash_input_tokens": "0dfb73a8eb3f692c",
|
1243 |
-
"hash_cont_tokens": "61ef0c8a87f9c92d"
|
1244 |
-
},
|
1245 |
-
"truncated": 0,
|
1246 |
-
"non-truncated": 2448,
|
1247 |
-
"padded": 2448,
|
1248 |
-
"non-padded": 0,
|
1249 |
-
"effective_few_shots": 5.0,
|
1250 |
-
"num_truncated_few_shots": 0
|
1251 |
-
},
|
1252 |
-
"harness|hendrycksTest-public_relations|5": {
|
1253 |
-
"hashes": {
|
1254 |
-
"hash_examples": "0d25072e1761652a",
|
1255 |
-
"hash_full_prompts": "f9adc39cfa9f42ba",
|
1256 |
-
"hash_input_tokens": "1710c6ba4c9f3cbd",
|
1257 |
-
"hash_cont_tokens": "568f585a259965c1"
|
1258 |
-
},
|
1259 |
-
"truncated": 0,
|
1260 |
-
"non-truncated": 440,
|
1261 |
-
"padded": 440,
|
1262 |
-
"non-padded": 0,
|
1263 |
-
"effective_few_shots": 5.0,
|
1264 |
-
"num_truncated_few_shots": 0
|
1265 |
-
},
|
1266 |
-
"harness|hendrycksTest-security_studies|5": {
|
1267 |
-
"hashes": {
|
1268 |
-
"hash_examples": "62bb8197e63d60d4",
|
1269 |
-
"hash_full_prompts": "869c9c3ae196b7c3",
|
1270 |
-
"hash_input_tokens": "d49711415961ced7",
|
1271 |
-
"hash_cont_tokens": "d70cfe096d4fb7bd"
|
1272 |
-
},
|
1273 |
-
"truncated": 0,
|
1274 |
-
"non-truncated": 980,
|
1275 |
-
"padded": 980,
|
1276 |
-
"non-padded": 0,
|
1277 |
-
"effective_few_shots": 5.0,
|
1278 |
-
"num_truncated_few_shots": 0
|
1279 |
-
},
|
1280 |
-
"harness|hendrycksTest-sociology|5": {
|
1281 |
-
"hashes": {
|
1282 |
-
"hash_examples": "e7959df87dea8672",
|
1283 |
-
"hash_full_prompts": "1a1fc00e17b3a52a",
|
1284 |
-
"hash_input_tokens": "828999f7624cbe7e",
|
1285 |
-
"hash_cont_tokens": "c3a3bdfd177eed5b"
|
1286 |
-
},
|
1287 |
-
"truncated": 0,
|
1288 |
-
"non-truncated": 804,
|
1289 |
-
"padded": 804,
|
1290 |
-
"non-padded": 0,
|
1291 |
-
"effective_few_shots": 5.0,
|
1292 |
-
"num_truncated_few_shots": 0
|
1293 |
-
},
|
1294 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
1295 |
-
"hashes": {
|
1296 |
-
"hash_examples": "4a56a01ddca44dca",
|
1297 |
-
"hash_full_prompts": "0c7a7081c71c07b6",
|
1298 |
-
"hash_input_tokens": "42054621e718dbee",
|
1299 |
-
"hash_cont_tokens": "2568d0e8e36fa959"
|
1300 |
-
},
|
1301 |
-
"truncated": 0,
|
1302 |
-
"non-truncated": 400,
|
1303 |
-
"padded": 400,
|
1304 |
-
"non-padded": 0,
|
1305 |
-
"effective_few_shots": 5.0,
|
1306 |
-
"num_truncated_few_shots": 0
|
1307 |
-
},
|
1308 |
-
"harness|hendrycksTest-virology|5": {
|
1309 |
-
"hashes": {
|
1310 |
-
"hash_examples": "451cc86a8c4f4fe9",
|
1311 |
-
"hash_full_prompts": "01e95325d8b738e4",
|
1312 |
-
"hash_input_tokens": "6c4f0aa4dc859c04",
|
1313 |
-
"hash_cont_tokens": "c178cccd753d9bc5"
|
1314 |
-
},
|
1315 |
-
"truncated": 0,
|
1316 |
-
"non-truncated": 664,
|
1317 |
-
"padded": 664,
|
1318 |
-
"non-padded": 0,
|
1319 |
-
"effective_few_shots": 5.0,
|
1320 |
-
"num_truncated_few_shots": 0
|
1321 |
-
},
|
1322 |
-
"harness|hendrycksTest-world_religions|5": {
|
1323 |
-
"hashes": {
|
1324 |
-
"hash_examples": "3b29cfaf1a81c379",
|
1325 |
-
"hash_full_prompts": "e0d79a15083dfdff",
|
1326 |
-
"hash_input_tokens": "6c75d44e092ff24f",
|
1327 |
-
"hash_cont_tokens": "0a3a3ea5ef49d19c"
|
1328 |
-
},
|
1329 |
-
"truncated": 0,
|
1330 |
-
"non-truncated": 684,
|
1331 |
-
"padded": 684,
|
1332 |
-
"non-padded": 0,
|
1333 |
-
"effective_few_shots": 5.0,
|
1334 |
-
"num_truncated_few_shots": 0
|
1335 |
-
},
|
1336 |
-
"harness|truthfulqa:mc|0": {
|
1337 |
-
"hashes": {
|
1338 |
-
"hash_examples": "23176c0531c7b867",
|
1339 |
-
"hash_full_prompts": "36a6d90e75d92d4a",
|
1340 |
-
"hash_input_tokens": "2738d7ed7075faa7",
|
1341 |
-
"hash_cont_tokens": "6d1691881e252df0"
|
1342 |
-
},
|
1343 |
-
"truncated": 0,
|
1344 |
-
"non-truncated": 9996,
|
1345 |
-
"padded": 9996,
|
1346 |
-
"non-padded": 0,
|
1347 |
-
"effective_few_shots": 0.0,
|
1348 |
-
"num_truncated_few_shots": 0
|
1349 |
-
}
|
1350 |
-
},
|
1351 |
-
"summary_general": {
|
1352 |
-
"hashes": {
|
1353 |
-
"hash_examples": "d84d18e9a963753d",
|
1354 |
-
"hash_full_prompts": "12b540783521a8e6",
|
1355 |
-
"hash_input_tokens": "6fecf578c508db6a",
|
1356 |
-
"hash_cont_tokens": "f4b7b7f3a2788768"
|
1357 |
-
},
|
1358 |
-
"total_evaluation_time_secondes": "25515.490225315094",
|
1359 |
-
"truncated": 2088,
|
1360 |
-
"non-truncated": 108931,
|
1361 |
-
"padded": 108834,
|
1362 |
-
"non-padded": 2185,
|
1363 |
-
"num_truncated_few_shots": 0
|
1364 |
-
}
|
1365 |
-
}
upstage/llama-65b-instruct/results_2023-08-14T23-57-10.007854.json
DELETED
@@ -1,1365 +0,0 @@
{
|
2 |
-
"results": {
|
3 |
-
"harness|arc:challenge|25": {
|
4 |
-
"acc": 0.6527303754266212,
|
5 |
-
"acc_stderr": 0.013913034529620446,
|
6 |
-
"acc_norm": 0.6885665529010239,
|
7 |
-
"acc_norm_stderr": 0.01353247209985094
|
8 |
-
},
|
9 |
-
"harness|hellaswag|10": {
|
10 |
-
"acc": 0.6733718382792272,
|
11 |
-
"acc_stderr": 0.004680215003395924,
|
12 |
-
"acc_norm": 0.8642700657239594,
|
13 |
-
"acc_norm_stderr": 0.003418015843918836
|
14 |
-
},
|
15 |
-
"harness|hendrycksTest-abstract_algebra|5": {
|
16 |
-
"acc": 0.31,
|
17 |
-
"acc_stderr": 0.04648231987117316,
|
18 |
-
"acc_norm": 0.31,
|
19 |
-
"acc_norm_stderr": 0.04648231987117316
|
20 |
-
},
|
21 |
-
"harness|hendrycksTest-anatomy|5": {
|
22 |
-
"acc": 0.5555555555555556,
|
23 |
-
"acc_stderr": 0.04292596718256981,
|
24 |
-
"acc_norm": 0.5555555555555556,
|
25 |
-
"acc_norm_stderr": 0.04292596718256981
|
26 |
-
},
|
27 |
-
"harness|hendrycksTest-astronomy|5": {
|
28 |
-
"acc": 0.743421052631579,
|
29 |
-
"acc_stderr": 0.0355418036802569,
|
30 |
-
"acc_norm": 0.743421052631579,
|
31 |
-
"acc_norm_stderr": 0.0355418036802569
|
32 |
-
},
|
33 |
-
"harness|hendrycksTest-business_ethics|5": {
|
34 |
-
"acc": 0.68,
|
35 |
-
"acc_stderr": 0.046882617226215034,
|
36 |
-
"acc_norm": 0.68,
|
37 |
-
"acc_norm_stderr": 0.046882617226215034
|
38 |
-
},
|
39 |
-
"harness|hendrycksTest-clinical_knowledge|5": {
|
40 |
-
"acc": 0.6981132075471698,
|
41 |
-
"acc_stderr": 0.02825420034443866,
|
42 |
-
"acc_norm": 0.6981132075471698,
|
43 |
-
"acc_norm_stderr": 0.02825420034443866
|
44 |
-
},
|
45 |
-
"harness|hendrycksTest-college_biology|5": {
|
46 |
-
"acc": 0.7291666666666666,
|
47 |
-
"acc_stderr": 0.03716177437566017,
|
48 |
-
"acc_norm": 0.7291666666666666,
|
49 |
-
"acc_norm_stderr": 0.03716177437566017
|
50 |
-
},
|
51 |
-
"harness|hendrycksTest-college_chemistry|5": {
|
52 |
-
"acc": 0.47,
|
53 |
-
"acc_stderr": 0.050161355804659205,
|
54 |
-
"acc_norm": 0.47,
|
55 |
-
"acc_norm_stderr": 0.050161355804659205
|
56 |
-
},
|
57 |
-
"harness|hendrycksTest-college_computer_science|5": {
|
58 |
-
"acc": 0.47,
|
59 |
-
"acc_stderr": 0.050161355804659205,
|
60 |
-
"acc_norm": 0.47,
|
61 |
-
"acc_norm_stderr": 0.050161355804659205
|
62 |
-
},
|
63 |
-
"harness|hendrycksTest-college_mathematics|5": {
|
64 |
-
"acc": 0.33,
|
65 |
-
"acc_stderr": 0.047258156262526045,
|
66 |
-
"acc_norm": 0.33,
|
67 |
-
"acc_norm_stderr": 0.047258156262526045
|
68 |
-
},
|
69 |
-
"harness|hendrycksTest-college_medicine|5": {
|
70 |
-
"acc": 0.5606936416184971,
|
71 |
-
"acc_stderr": 0.037842719328874674,
|
72 |
-
"acc_norm": 0.5606936416184971,
|
73 |
-
"acc_norm_stderr": 0.037842719328874674
|
74 |
-
},
|
75 |
-
"harness|hendrycksTest-college_physics|5": {
|
76 |
-
"acc": 0.45098039215686275,
|
77 |
-
"acc_stderr": 0.049512182523962625,
|
78 |
-
"acc_norm": 0.45098039215686275,
|
79 |
-
"acc_norm_stderr": 0.049512182523962625
|
80 |
-
},
|
81 |
-
"harness|hendrycksTest-computer_security|5": {
|
82 |
-
"acc": 0.74,
|
83 |
-
"acc_stderr": 0.04408440022768078,
|
84 |
-
"acc_norm": 0.74,
|
85 |
-
"acc_norm_stderr": 0.04408440022768078
|
86 |
-
},
|
87 |
-
"harness|hendrycksTest-conceptual_physics|5": {
|
88 |
-
"acc": 0.625531914893617,
|
89 |
-
"acc_stderr": 0.03163910665367291,
|
90 |
-
"acc_norm": 0.625531914893617,
|
91 |
-
"acc_norm_stderr": 0.03163910665367291
|
92 |
-
},
|
93 |
-
"harness|hendrycksTest-econometrics|5": {
|
94 |
-
"acc": 0.3684210526315789,
|
95 |
-
"acc_stderr": 0.04537815354939392,
|
96 |
-
"acc_norm": 0.3684210526315789,
|
97 |
-
"acc_norm_stderr": 0.04537815354939392
|
98 |
-
},
|
99 |
-
"harness|hendrycksTest-electrical_engineering|5": {
|
100 |
-
"acc": 0.5793103448275863,
|
101 |
-
"acc_stderr": 0.0411391498118926,
|
102 |
-
"acc_norm": 0.5793103448275863,
|
103 |
-
"acc_norm_stderr": 0.0411391498118926
|
104 |
-
},
|
105 |
-
"harness|hendrycksTest-elementary_mathematics|5": {
|
106 |
-
"acc": 0.42063492063492064,
|
107 |
-
"acc_stderr": 0.025424835086923992,
|
108 |
-
"acc_norm": 0.42063492063492064,
|
109 |
-
"acc_norm_stderr": 0.025424835086923992
|
110 |
-
},
|
111 |
-
"harness|hendrycksTest-formal_logic|5": {
|
112 |
-
"acc": 0.4444444444444444,
|
113 |
-
"acc_stderr": 0.044444444444444495,
|
114 |
-
"acc_norm": 0.4444444444444444,
|
115 |
-
"acc_norm_stderr": 0.044444444444444495
|
116 |
-
},
|
117 |
-
"harness|hendrycksTest-global_facts|5": {
|
118 |
-
"acc": 0.37,
|
119 |
-
"acc_stderr": 0.048523658709391,
|
120 |
-
"acc_norm": 0.37,
|
121 |
-
"acc_norm_stderr": 0.048523658709391
|
122 |
-
},
|
123 |
-
"harness|hendrycksTest-high_school_biology|5": {
|
124 |
-
"acc": 0.7677419354838709,
|
125 |
-
"acc_stderr": 0.02402225613030823,
|
126 |
-
"acc_norm": 0.7677419354838709,
|
127 |
-
"acc_norm_stderr": 0.02402225613030823
|
128 |
-
},
|
129 |
-
"harness|hendrycksTest-high_school_chemistry|5": {
|
130 |
-
"acc": 0.41379310344827586,
|
131 |
-
"acc_stderr": 0.03465304488406795,
|
132 |
-
"acc_norm": 0.41379310344827586,
|
133 |
-
"acc_norm_stderr": 0.03465304488406795
|
134 |
-
},
|
135 |
-
"harness|hendrycksTest-high_school_computer_science|5": {
|
136 |
-
"acc": 0.71,
|
137 |
-
"acc_stderr": 0.045604802157206845,
|
138 |
-
"acc_norm": 0.71,
|
139 |
-
"acc_norm_stderr": 0.045604802157206845
|
140 |
-
},
|
141 |
-
"harness|hendrycksTest-high_school_european_history|5": {
|
142 |
-
"acc": 0.8,
|
143 |
-
"acc_stderr": 0.031234752377721175,
|
144 |
-
"acc_norm": 0.8,
|
145 |
-
"acc_norm_stderr": 0.031234752377721175
|
146 |
-
},
|
147 |
-
"harness|hendrycksTest-high_school_geography|5": {
|
148 |
-
"acc": 0.8434343434343434,
|
149 |
-
"acc_stderr": 0.025890520358141454,
|
150 |
-
"acc_norm": 0.8434343434343434,
|
151 |
-
"acc_norm_stderr": 0.025890520358141454
|
152 |
-
},
|
153 |
-
"harness|hendrycksTest-high_school_government_and_politics|5": {
|
154 |
-
"acc": 0.9015544041450777,
|
155 |
-
"acc_stderr": 0.021500249576033477,
|
156 |
-
"acc_norm": 0.9015544041450777,
|
157 |
-
"acc_norm_stderr": 0.021500249576033477
|
158 |
-
},
|
159 |
-
"harness|hendrycksTest-high_school_macroeconomics|5": {
|
160 |
-
"acc": 0.6538461538461539,
|
161 |
-
"acc_stderr": 0.024121125416941183,
|
162 |
-
"acc_norm": 0.6538461538461539,
|
163 |
-
"acc_norm_stderr": 0.024121125416941183
|
164 |
-
},
|
165 |
-
"harness|hendrycksTest-high_school_mathematics|5": {
|
166 |
-
"acc": 0.2962962962962963,
|
167 |
-
"acc_stderr": 0.02784081149587194,
|
168 |
-
"acc_norm": 0.2962962962962963,
|
169 |
-
"acc_norm_stderr": 0.02784081149587194
|
170 |
-
},
|
171 |
-
"harness|hendrycksTest-high_school_microeconomics|5": {
|
172 |
-
"acc": 0.7058823529411765,
|
173 |
-
"acc_stderr": 0.02959732973097811,
|
174 |
-
"acc_norm": 0.7058823529411765,
|
175 |
-
"acc_norm_stderr": 0.02959732973097811
|
176 |
-
},
|
177 |
-
"harness|hendrycksTest-high_school_physics|5": {
|
178 |
-
"acc": 0.41721854304635764,
|
179 |
-
"acc_stderr": 0.04026141497634611,
|
180 |
-
"acc_norm": 0.41721854304635764,
|
181 |
-
"acc_norm_stderr": 0.04026141497634611
|
182 |
-
},
|
183 |
-
"harness|hendrycksTest-high_school_psychology|5": {
|
184 |
-
"acc": 0.8275229357798165,
|
185 |
-
"acc_stderr": 0.01619780795684803,
|
186 |
-
"acc_norm": 0.8275229357798165,
|
187 |
-
"acc_norm_stderr": 0.01619780795684803
|
188 |
-
},
|
189 |
-
"harness|hendrycksTest-high_school_statistics|5": {
|
190 |
-
"acc": 0.49537037037037035,
|
191 |
-
"acc_stderr": 0.03409825519163572,
|
192 |
-
"acc_norm": 0.49537037037037035,
|
193 |
-
"acc_norm_stderr": 0.03409825519163572
|
194 |
-
},
|
195 |
-
"harness|hendrycksTest-high_school_us_history|5": {
|
196 |
-
"acc": 0.8480392156862745,
|
197 |
-
"acc_stderr": 0.025195658428931796,
|
198 |
-
"acc_norm": 0.8480392156862745,
|
199 |
-
"acc_norm_stderr": 0.025195658428931796
|
200 |
-
},
|
201 |
-
"harness|hendrycksTest-high_school_world_history|5": {
|
202 |
-
"acc": 0.8565400843881856,
|
203 |
-
"acc_stderr": 0.022818291821017012,
|
204 |
-
"acc_norm": 0.8565400843881856,
|
205 |
-
"acc_norm_stderr": 0.022818291821017012
|
206 |
-
},
|
207 |
-
"harness|hendrycksTest-human_aging|5": {
|
208 |
-
"acc": 0.6995515695067265,
|
209 |
-
"acc_stderr": 0.03076935200822915,
|
210 |
-
"acc_norm": 0.6995515695067265,
|
211 |
-
"acc_norm_stderr": 0.03076935200822915
|
212 |
-
},
|
213 |
-
"harness|hendrycksTest-human_sexuality|5": {
|
214 |
-
"acc": 0.7633587786259542,
|
215 |
-
"acc_stderr": 0.03727673575596914,
|
216 |
-
"acc_norm": 0.7633587786259542,
|
217 |
-
"acc_norm_stderr": 0.03727673575596914
|
218 |
-
},
|
219 |
-
"harness|hendrycksTest-international_law|5": {
|
220 |
-
"acc": 0.8347107438016529,
|
221 |
-
"acc_stderr": 0.03390780612972776,
|
222 |
-
"acc_norm": 0.8347107438016529,
|
223 |
-
"acc_norm_stderr": 0.03390780612972776
|
224 |
-
},
|
225 |
-
"harness|hendrycksTest-jurisprudence|5": {
|
226 |
-
"acc": 0.7962962962962963,
|
227 |
-
"acc_stderr": 0.03893542518824847,
|
228 |
-
"acc_norm": 0.7962962962962963,
|
229 |
-
"acc_norm_stderr": 0.03893542518824847
|
230 |
-
},
|
231 |
-
"harness|hendrycksTest-logical_fallacies|5": {
|
232 |
-
"acc": 0.7607361963190185,
|
233 |
-
"acc_stderr": 0.033519538795212696,
|
234 |
-
"acc_norm": 0.7607361963190185,
|
235 |
-
"acc_norm_stderr": 0.033519538795212696
|
236 |
-
},
|
237 |
-
"harness|hendrycksTest-machine_learning|5": {
|
238 |
-
"acc": 0.48214285714285715,
|
239 |
-
"acc_stderr": 0.047427623612430116,
|
240 |
-
"acc_norm": 0.48214285714285715,
|
241 |
-
"acc_norm_stderr": 0.047427623612430116
|
242 |
-
},
|
243 |
-
"harness|hendrycksTest-management|5": {
|
244 |
-
"acc": 0.8252427184466019,
|
245 |
-
"acc_stderr": 0.03760178006026621,
|
246 |
-
"acc_norm": 0.8252427184466019,
|
247 |
-
"acc_norm_stderr": 0.03760178006026621
|
248 |
-
},
|
249 |
-
"harness|hendrycksTest-marketing|5": {
|
250 |
-
"acc": 0.8803418803418803,
|
251 |
-
"acc_stderr": 0.02126271940040697,
|
252 |
-
"acc_norm": 0.8803418803418803,
|
253 |
-
"acc_norm_stderr": 0.02126271940040697
|
254 |
-
},
|
255 |
-
"harness|hendrycksTest-medical_genetics|5": {
|
256 |
-
"acc": 0.68,
|
257 |
-
"acc_stderr": 0.04688261722621505,
|
258 |
-
"acc_norm": 0.68,
|
259 |
-
"acc_norm_stderr": 0.04688261722621505
|
260 |
-
},
|
261 |
-
"harness|hendrycksTest-miscellaneous|5": {
|
262 |
-
"acc": 0.8301404853128991,
|
263 |
-
"acc_stderr": 0.013428186370608294,
|
264 |
-
"acc_norm": 0.8301404853128991,
|
265 |
-
"acc_norm_stderr": 0.013428186370608294
|
266 |
-
},
|
267 |
-
"harness|hendrycksTest-moral_disputes|5": {
|
268 |
-
"acc": 0.7514450867052023,
|
269 |
-
"acc_stderr": 0.02326752843210017,
|
270 |
-
"acc_norm": 0.7514450867052023,
|
271 |
-
"acc_norm_stderr": 0.02326752843210017
|
272 |
-
},
|
273 |
-
"harness|hendrycksTest-moral_scenarios|5": {
|
274 |
-
"acc": 0.49050279329608937,
|
275 |
-
"acc_stderr": 0.016719484643348752,
|
276 |
-
"acc_norm": 0.49050279329608937,
|
277 |
-
"acc_norm_stderr": 0.016719484643348752
|
278 |
-
},
|
279 |
-
"harness|hendrycksTest-nutrition|5": {
|
280 |
-
"acc": 0.7124183006535948,
|
281 |
-
"acc_stderr": 0.02591780611714716,
|
282 |
-
"acc_norm": 0.7124183006535948,
|
283 |
-
"acc_norm_stderr": 0.02591780611714716
|
284 |
-
},
|
285 |
-
"harness|hendrycksTest-philosophy|5": {
|
286 |
-
"acc": 0.7202572347266881,
|
287 |
-
"acc_stderr": 0.025494259350694905,
|
288 |
-
"acc_norm": 0.7202572347266881,
|
289 |
-
"acc_norm_stderr": 0.025494259350694905
|
290 |
-
},
|
291 |
-
"harness|hendrycksTest-prehistory|5": {
|
292 |
-
"acc": 0.7407407407407407,
|
293 |
-
"acc_stderr": 0.024383665531035454,
|
294 |
-
"acc_norm": 0.7407407407407407,
|
295 |
-
"acc_norm_stderr": 0.024383665531035454
|
296 |
-
},
|
297 |
-
"harness|hendrycksTest-professional_accounting|5": {
|
298 |
-
"acc": 0.5283687943262412,
|
299 |
-
"acc_stderr": 0.029779450957303055,
|
300 |
-
"acc_norm": 0.5283687943262412,
|
301 |
-
"acc_norm_stderr": 0.029779450957303055
|
302 |
-
},
|
303 |
-
"harness|hendrycksTest-professional_law|5": {
|
304 |
-
"acc": 0.49608865710560623,
|
305 |
-
"acc_stderr": 0.012769845366441194,
|
306 |
-
"acc_norm": 0.49608865710560623,
|
307 |
-
"acc_norm_stderr": 0.012769845366441194
|
308 |
-
},
|
309 |
-
"harness|hendrycksTest-professional_medicine|5": {
|
310 |
-
"acc": 0.6139705882352942,
|
311 |
-
"acc_stderr": 0.029573269134411124,
|
312 |
-
"acc_norm": 0.6139705882352942,
|
313 |
-
"acc_norm_stderr": 0.029573269134411124
|
314 |
-
},
|
315 |
-
"harness|hendrycksTest-professional_psychology|5": {
|
316 |
-
"acc": 0.696078431372549,
|
317 |
-
"acc_stderr": 0.01860755213127983,
|
318 |
-
"acc_norm": 0.696078431372549,
|
319 |
-
"acc_norm_stderr": 0.01860755213127983
|
320 |
-
},
|
321 |
-
"harness|hendrycksTest-public_relations|5": {
|
322 |
-
"acc": 0.7363636363636363,
|
323 |
-
"acc_stderr": 0.04220224692971987,
|
324 |
-
"acc_norm": 0.7363636363636363,
|
325 |
-
"acc_norm_stderr": 0.04220224692971987
|
326 |
-
},
|
327 |
-
"harness|hendrycksTest-security_studies|5": {
|
328 |
-
"acc": 0.7795918367346939,
|
329 |
-
"acc_stderr": 0.026537045312145277,
|
330 |
-
"acc_norm": 0.7795918367346939,
|
331 |
-
"acc_norm_stderr": 0.026537045312145277
|
332 |
-
},
|
333 |
-
"harness|hendrycksTest-sociology|5": {
|
334 |
-
"acc": 0.8606965174129353,
|
335 |
-
"acc_stderr": 0.02448448716291397,
|
336 |
-
"acc_norm": 0.8606965174129353,
|
337 |
-
"acc_norm_stderr": 0.02448448716291397
|
338 |
-
},
|
339 |
-
"harness|hendrycksTest-us_foreign_policy|5": {
|
340 |
-
"acc": 0.82,
|
341 |
-
"acc_stderr": 0.03861229196653694,
|
342 |
-
"acc_norm": 0.82,
|
343 |
-
"acc_norm_stderr": 0.03861229196653694
|
344 |
-
},
|
345 |
-
"harness|hendrycksTest-virology|5": {
|
346 |
-
"acc": 0.536144578313253,
|
347 |
-
"acc_stderr": 0.038823108508905954,
|
348 |
-
"acc_norm": 0.536144578313253,
|
349 |
-
"acc_norm_stderr": 0.038823108508905954
|
350 |
-
},
|
351 |
-
"harness|hendrycksTest-world_religions|5": {
|
352 |
-
"acc": 0.8011695906432749,
|
353 |
-
"acc_stderr": 0.030611116557432528,
|
354 |
-
"acc_norm": 0.8011695906432749,
|
355 |
-
"acc_norm_stderr": 0.030611116557432528
|
356 |
-
},
|
357 |
-
"harness|truthfulqa:mc|0": {
|
358 |
-
"mc1": 0.42962056303549573,
|
359 |
-
"mc1_stderr": 0.017329234580409095,
|
360 |
-
"mc2": 0.5969914036089322,
|
361 |
-
"mc2_stderr": 0.015215660759560816
|
362 |
-
},
|
363 |
-
"all": {
|
364 |
-
"acc": 0.6482368382814275,
|
365 |
-
"acc_stderr": 0.03259733412834221,
|
366 |
-
"acc_norm": 0.6520797942970061,
|
367 |
-
"acc_norm_stderr": 0.03256949071157532,
|
368 |
-
"mc1": 0.42962056303549573,
|
369 |
-
"mc1_stderr": 0.017329234580409095,
|
370 |
-
"mc2": 0.5969914036089322,
|
371 |
-
"mc2_stderr": 0.015215660759560816
|
372 |
-
}
|
373 |
-
},
|
374 |
-
"versions": {
|
375 |
-
"harness|arc:challenge|25": 0,
|
376 |
-
"harness|hellaswag|10": 0,
|
377 |
-
"harness|hendrycksTest-abstract_algebra|5": 1,
|
378 |
-
"harness|hendrycksTest-anatomy|5": 1,
|
379 |
-
"harness|hendrycksTest-astronomy|5": 1,
|
380 |
-
"harness|hendrycksTest-business_ethics|5": 1,
|
381 |
-
"harness|hendrycksTest-clinical_knowledge|5": 1,
|
382 |
-
"harness|hendrycksTest-college_biology|5": 1,
|
383 |
-
"harness|hendrycksTest-college_chemistry|5": 1,
|
384 |
-
"harness|hendrycksTest-college_computer_science|5": 1,
|
385 |
-
"harness|hendrycksTest-college_mathematics|5": 1,
|
386 |
-
"harness|hendrycksTest-college_medicine|5": 1,
|
387 |
-
"harness|hendrycksTest-college_physics|5": 1,
|
388 |
-
"harness|hendrycksTest-computer_security|5": 1,
|
389 |
-
"harness|hendrycksTest-conceptual_physics|5": 1,
|
390 |
-
"harness|hendrycksTest-econometrics|5": 1,
|
391 |
-
"harness|hendrycksTest-electrical_engineering|5": 1,
|
392 |
-
"harness|hendrycksTest-elementary_mathematics|5": 1,
|
393 |
-
"harness|hendrycksTest-formal_logic|5": 1,
|
394 |
-
"harness|hendrycksTest-global_facts|5": 1,
|
395 |
-
"harness|hendrycksTest-high_school_biology|5": 1,
|
396 |
-
"harness|hendrycksTest-high_school_chemistry|5": 1,
|
397 |
-
"harness|hendrycksTest-high_school_computer_science|5": 1,
|
398 |
-
"harness|hendrycksTest-high_school_european_history|5": 1,
|
399 |
-
"harness|hendrycksTest-high_school_geography|5": 1,
|
400 |
-
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
|
401 |
-
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
|
402 |
-
"harness|hendrycksTest-high_school_mathematics|5": 1,
|
403 |
-
"harness|hendrycksTest-high_school_microeconomics|5": 1,
|
404 |
-
"harness|hendrycksTest-high_school_physics|5": 1,
|
405 |
-
"harness|hendrycksTest-high_school_psychology|5": 1,
|
406 |
-
"harness|hendrycksTest-high_school_statistics|5": 1,
|
407 |
-
"harness|hendrycksTest-high_school_us_history|5": 1,
|
408 |
-
"harness|hendrycksTest-high_school_world_history|5": 1,
|
409 |
-
"harness|hendrycksTest-human_aging|5": 1,
|
410 |
-
"harness|hendrycksTest-human_sexuality|5": 1,
|
411 |
-
"harness|hendrycksTest-international_law|5": 1,
|
412 |
-
"harness|hendrycksTest-jurisprudence|5": 1,
|
413 |
-
"harness|hendrycksTest-logical_fallacies|5": 1,
|
414 |
-
"harness|hendrycksTest-machine_learning|5": 1,
|
415 |
-
"harness|hendrycksTest-management|5": 1,
|
416 |
-
"harness|hendrycksTest-marketing|5": 1,
|
417 |
-
"harness|hendrycksTest-medical_genetics|5": 1,
|
418 |
-
"harness|hendrycksTest-miscellaneous|5": 1,
|
419 |
-
"harness|hendrycksTest-moral_disputes|5": 1,
|
420 |
-
"harness|hendrycksTest-moral_scenarios|5": 1,
|
421 |
-
"harness|hendrycksTest-nutrition|5": 1,
|
422 |
-
"harness|hendrycksTest-philosophy|5": 1,
|
423 |
-
"harness|hendrycksTest-prehistory|5": 1,
|
424 |
-
"harness|hendrycksTest-professional_accounting|5": 1,
|
425 |
-
"harness|hendrycksTest-professional_law|5": 1,
|
426 |
-
"harness|hendrycksTest-professional_medicine|5": 1,
|
427 |
-
"harness|hendrycksTest-professional_psychology|5": 1,
|
428 |
-
"harness|hendrycksTest-public_relations|5": 1,
|
429 |
-
"harness|hendrycksTest-security_studies|5": 1,
|
430 |
-
"harness|hendrycksTest-sociology|5": 1,
|
431 |
-
"harness|hendrycksTest-us_foreign_policy|5": 1,
|
432 |
-
"harness|hendrycksTest-virology|5": 1,
|
433 |
-
"harness|hendrycksTest-world_religions|5": 1,
|
434 |
-
"harness|truthfulqa:mc|0": 1,
|
435 |
-
"all": 0
|
436 |
-
},
|
437 |
-
"config_general": {
|
438 |
-
"model_name": "upstage/llama-65b-instruct",
|
439 |
-
"model_sha": "f70a9865cb0a1ac1157ad928b3b428dd85d52946",
|
440 |
-
"model_dtype": "torch.float16",
|
441 |
-
"lighteval_sha": "efe93333f9f25e7d48cc67a6bf362e6d576f727b",
|
442 |
-
"num_few_shot_default": 0,
|
443 |
-
"num_fewshot_seeds": 1,
|
444 |
-
"override_batch_size": 1,
|
445 |
-
"max_samples": null
|
446 |
-
},
|
447 |
-
"config_tasks": {
|
448 |
-
"harness|arc:challenge": "LM Harness task",
|
449 |
-
"harness|hellaswag": "LM Harness task",
|
450 |
-
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
|
451 |
-
"harness|hendrycksTest-anatomy": "LM Harness task",
|
452 |
-
"harness|hendrycksTest-astronomy": "LM Harness task",
|
453 |
-
"harness|hendrycksTest-business_ethics": "LM Harness task",
|
454 |
-
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
|
455 |
-
"harness|hendrycksTest-college_biology": "LM Harness task",
|
456 |
-
"harness|hendrycksTest-college_chemistry": "LM Harness task",
|
457 |
-
"harness|hendrycksTest-college_computer_science": "LM Harness task",
|
458 |
-
"harness|hendrycksTest-college_mathematics": "LM Harness task",
|
459 |
-
"harness|hendrycksTest-college_medicine": "LM Harness task",
|
460 |
-
"harness|hendrycksTest-college_physics": "LM Harness task",
|
461 |
-
"harness|hendrycksTest-computer_security": "LM Harness task",
|
462 |
-
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
|
463 |
-
"harness|hendrycksTest-econometrics": "LM Harness task",
|
464 |
-
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
|
465 |
-
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
|
466 |
-
"harness|hendrycksTest-formal_logic": "LM Harness task",
|
467 |
-
"harness|hendrycksTest-global_facts": "LM Harness task",
|
468 |
-
"harness|hendrycksTest-high_school_biology": "LM Harness task",
|
469 |
-
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
|
470 |
-
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
|
471 |
-
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
|
472 |
-
"harness|hendrycksTest-high_school_geography": "LM Harness task",
|
473 |
-
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
|
474 |
-
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
|
475 |
-
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
|
476 |
-
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
|
477 |
-
"harness|hendrycksTest-high_school_physics": "LM Harness task",
|
478 |
-
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
|
479 |
-
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
|
480 |
-
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
|
481 |
-
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
|
482 |
-
"harness|hendrycksTest-human_aging": "LM Harness task",
|
483 |
-
"harness|hendrycksTest-human_sexuality": "LM Harness task",
|
484 |
-
"harness|hendrycksTest-international_law": "LM Harness task",
|
485 |
-
"harness|hendrycksTest-jurisprudence": "LM Harness task",
|
486 |
-
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
|
487 |
-
"harness|hendrycksTest-machine_learning": "LM Harness task",
|
488 |
-
"harness|hendrycksTest-management": "LM Harness task",
|
489 |
-
"harness|hendrycksTest-marketing": "LM Harness task",
|
490 |
-
"harness|hendrycksTest-medical_genetics": "LM Harness task",
|
491 |
-
"harness|hendrycksTest-miscellaneous": "LM Harness task",
|
492 |
-
"harness|hendrycksTest-moral_disputes": "LM Harness task",
|
493 |
-
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
|
494 |
-
"harness|hendrycksTest-nutrition": "LM Harness task",
|
495 |
-
"harness|hendrycksTest-philosophy": "LM Harness task",
|
496 |
-
"harness|hendrycksTest-prehistory": "LM Harness task",
|
497 |
-
"harness|hendrycksTest-professional_accounting": "LM Harness task",
|
498 |
-
"harness|hendrycksTest-professional_law": "LM Harness task",
|
499 |
-
"harness|hendrycksTest-professional_medicine": "LM Harness task",
|
500 |
-
"harness|hendrycksTest-professional_psychology": "LM Harness task",
|
501 |
-
"harness|hendrycksTest-public_relations": "LM Harness task",
|
502 |
-
"harness|hendrycksTest-security_studies": "LM Harness task",
|
503 |
-
"harness|hendrycksTest-sociology": "LM Harness task",
|
504 |
-
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
|
505 |
-
"harness|hendrycksTest-virology": "LM Harness task",
|
506 |
-
"harness|hendrycksTest-world_religions": "LM Harness task",
|
507 |
-
"harness|truthfulqa:mc": "LM Harness task"
|
508 |
-
},
|
509 |
-
"summary_tasks": {
|
510 |
-
"harness|arc:challenge|25": {
|
511 |
-
"hashes": {
|
512 |
-
"hash_examples": "17b0cae357c0259e",
|
513 |
-
"hash_full_prompts": "045cbb916e5145c6",
|
514 |
-
"hash_input_tokens": "61571bf68d6d89aa",
|
515 |
-
"hash_cont_tokens": "ede2b335438f08e9"
|
516 |
-
},
|
517 |
-
"truncated": 0,
|
518 |
-
"non-truncated": 4687,
|
519 |
-
"padded": 4687,
|
520 |
-
"non-padded": 0,
|
521 |
-
"effective_few_shots": 25.0,
|
522 |
-
"num_truncated_few_shots": 0
|
523 |
-
},
|
524 |
-
"harness|hellaswag|10": {
|
525 |
-
"hashes": {
|
526 |
-
"hash_examples": "e1768ecb99d7ecf0",
|
527 |
-
"hash_full_prompts": "0b4c16983130f84f",
|
528 |
-
"hash_input_tokens": "29906669b1c7054a",
|
529 |
-
"hash_cont_tokens": "b41cf1ad182d68d5"
|
530 |
-
},
|
531 |
-
"truncated": 0,
|
532 |
-
"non-truncated": 40168,
|
533 |
-
"padded": 40113,
|
534 |
-
"non-padded": 55,
|
535 |
-
"effective_few_shots": 10.0,
|
536 |
-
"num_truncated_few_shots": 0
|
537 |
-
},
|
538 |
-
"harness|hendrycksTest-abstract_algebra|5": {
|
539 |
-
"hashes": {
|
540 |
-
"hash_examples": "280f9f325b40559a",
|
541 |
-
"hash_full_prompts": "2f776a367d23aea2",
|
542 |
-
"hash_input_tokens": "c54ff61ad0273dd7",
|
543 |
-
"hash_cont_tokens": "50421e30bef398f9"
|
544 |
-
},
|
545 |
-
"truncated": 0,
|
546 |
-
"non-truncated": 400,
|
547 |
-
"padded": 400,
|
548 |
-
"non-padded": 0,
|
549 |
-
"effective_few_shots": 5.0,
|
550 |
-
"num_truncated_few_shots": 0
|
551 |
-
},
|
552 |
-
"harness|hendrycksTest-anatomy|5": {
|
553 |
-
"hashes": {
|
554 |
-
"hash_examples": "2f83a4f1cab4ba18",
|
555 |
-
"hash_full_prompts": "516f74bef25df620",
|
556 |
-
"hash_input_tokens": "be31a1e22aef5f90",
|
557 |
-
"hash_cont_tokens": "f11971a765cb609f"
|
558 |
-
},
|
559 |
-
"truncated": 0,
|
560 |
-
"non-truncated": 540,
|
561 |
-
"padded": 540,
|
562 |
-
"non-padded": 0,
|
563 |
-
"effective_few_shots": 5.0,
|
564 |
-
"num_truncated_few_shots": 0
|
565 |
-
},
|
566 |
-
"harness|hendrycksTest-astronomy|5": {
|
567 |
-
"hashes": {
|
568 |
-
"hash_examples": "7d587b908da4d762",
|
569 |
-
"hash_full_prompts": "faf4e80f65de93ca",
|
570 |
-
"hash_input_tokens": "277a7b1fad566940",
|
571 |
-
"hash_cont_tokens": "238bd86950544b29"
|
572 |
-
},
|
573 |
-
"truncated": 0,
|
574 |
-
"non-truncated": 608,
|
575 |
-
"padded": 608,
|
576 |
-
"non-padded": 0,
|
577 |
-
"effective_few_shots": 5.0,
|
578 |
-
"num_truncated_few_shots": 0
|
579 |
-
},
|
580 |
-
"harness|hendrycksTest-business_ethics|5": {
|
581 |
-
"hashes": {
|
582 |
-
"hash_examples": "33e51740670de686",
|
583 |
-
"hash_full_prompts": "db01c3ef8e1479d4",
|
584 |
-
"hash_input_tokens": "ba552605bc116de5",
|
585 |
-
"hash_cont_tokens": "f9d6d2a7d7e9a041"
|
586 |
-
},
|
587 |
-
"truncated": 0,
|
588 |
-
"non-truncated": 400,
|
589 |
-
"padded": 400,
|
590 |
-
"non-padded": 0,
|
591 |
-
"effective_few_shots": 5.0,
|
592 |
-
"num_truncated_few_shots": 0
|
593 |
-
},
|
594 |
-
"harness|hendrycksTest-clinical_knowledge|5": {
|
595 |
-
"hashes": {
|
596 |
-
"hash_examples": "f3366dbe7eefffa4",
|
597 |
-
"hash_full_prompts": "49654f71d94b65c3",
|
598 |
-
"hash_input_tokens": "428c7563d0b98ab9",
|
599 |
-
"hash_cont_tokens": "6af58623d0d5fbcd"
|
600 |
-
},
|
601 |
-
"truncated": 0,
|
602 |
-
"non-truncated": 1060,
|
603 |
-
"padded": 1060,
|
604 |
-
"non-padded": 0,
|
605 |
-
"effective_few_shots": 5.0,
|
606 |
-
"num_truncated_few_shots": 0
|
607 |
-
},
|
608 |
-
"harness|hendrycksTest-college_biology|5": {
|
609 |
-
"hashes": {
|
610 |
-
"hash_examples": "ca2b6753a0193e7f",
|
611 |
-
"hash_full_prompts": "2b460b75f1fdfefd",
|
612 |
-
"hash_input_tokens": "da036601573942e2",
|
613 |
-
"hash_cont_tokens": "875cde3af7a0ee14"
|
614 |
-
},
|
615 |
-
"truncated": 0,
|
616 |
-
"non-truncated": 576,
|
617 |
-
"padded": 576,
|
618 |
-
"non-padded": 0,
|
619 |
-
"effective_few_shots": 5.0,
|
620 |
-
"num_truncated_few_shots": 0
|
621 |
-
},
|
622 |
-
"harness|hendrycksTest-college_chemistry|5": {
|
623 |
-
"hashes": {
|
624 |
-
"hash_examples": "22ff85f1d34f42d1",
|
625 |
-
"hash_full_prompts": "242c9be6da583e95",
|
626 |
-
"hash_input_tokens": "94e0196d6aded13d",
|
627 |
-
"hash_cont_tokens": "50421e30bef398f9"
|
628 |
-
},
|
629 |
-
"truncated": 0,
|
630 |
-
"non-truncated": 400,
|
631 |
-
"padded": 400,
|
632 |
-
"non-padded": 0,
|
633 |
-
"effective_few_shots": 5.0,
|
634 |
-
"num_truncated_few_shots": 0
|
635 |
-
},
|
636 |
-
"harness|hendrycksTest-college_computer_science|5": {
|
637 |
-
"hashes": {
|
638 |
-
"hash_examples": "30318289d717a5cf",
|
639 |
-
"hash_full_prompts": "ed2bdb4e87c4b371",
|
640 |
-
"hash_input_tokens": "6e4d0f4a8d36690b",
|
641 |
-
"hash_cont_tokens": "1ba0c71186b1505e"
|
642 |
-
},
|
643 |
-
"truncated": 0,
|
644 |
-
"non-truncated": 400,
|
645 |
-
"padded": 400,
|
646 |
-
"non-padded": 0,
|
647 |
-
"effective_few_shots": 5.0,
|
648 |
-
"num_truncated_few_shots": 0
|
649 |
-
},
|
650 |
-
"harness|hendrycksTest-college_mathematics|5": {
|
651 |
-
"hashes": {
|
652 |
-
"hash_examples": "4944d1f0b6b5d911",
|
653 |
-
"hash_full_prompts": "770bc4281c973190",
|
654 |
-
"hash_input_tokens": "614054d17109a25d",
|
655 |
-
"hash_cont_tokens": "50421e30bef398f9"
|
656 |
-
},
|
657 |
-
"truncated": 0,
|
658 |
-
"non-truncated": 400,
|
659 |
-
"padded": 400,
|
660 |
-
"non-padded": 0,
|
661 |
-
"effective_few_shots": 5.0,
|
662 |
-
"num_truncated_few_shots": 0
|
663 |
-
},
|
664 |
-
"harness|hendrycksTest-college_medicine|5": {
|
665 |
-
"hashes": {
|
666 |
-
"hash_examples": "dd69cc33381275af",
|
667 |
-
"hash_full_prompts": "ad2a53e5250ab46e",
|
668 |
-
"hash_input_tokens": "1d633b3cc0524ba8",
|
669 |
-
"hash_cont_tokens": "702fb6d82ff0d6ac"
|
670 |
-
},
|
671 |
-
"truncated": 0,
|
672 |
-
"non-truncated": 692,
|
673 |
-
"padded": 692,
|
674 |
-
"non-padded": 0,
|
675 |
-
"effective_few_shots": 5.0,
|
676 |
-
"num_truncated_few_shots": 0
|
677 |
-
},
|
678 |
-
"harness|hendrycksTest-college_physics|5": {
|
679 |
-
"hashes": {
|
680 |
-
"hash_examples": "875dd26d22655b0d",
|
681 |
-
"hash_full_prompts": "833a0d7b55aed500",
|
682 |
-
"hash_input_tokens": "5421d9a1af86cbd4",
|
683 |
-
"hash_cont_tokens": "f7b8097afc16a47c"
|
684 |
-
},
|
685 |
-
"truncated": 0,
|
686 |
-
"non-truncated": 408,
|
687 |
-
"padded": 408,
|
688 |
-
"non-padded": 0,
|
689 |
-
"effective_few_shots": 5.0,
|
690 |
-
"num_truncated_few_shots": 0
|
691 |
-
},
|
692 |
-
"harness|hendrycksTest-computer_security|5": {
|
693 |
-
"hashes": {
|
694 |
-
"hash_examples": "006451eedc0ededb",
|
695 |
-
"hash_full_prompts": "94034c97e85d8f46",
|
696 |
-
"hash_input_tokens": "5e6b70ecb333cf18",
|
697 |
-
"hash_cont_tokens": "50421e30bef398f9"
|
698 |
-
},
|
699 |
-
"truncated": 0,
|
700 |
-
"non-truncated": 400,
|
701 |
-
"padded": 400,
|
702 |
-
"non-padded": 0,
|
703 |
-
"effective_few_shots": 5.0,
|
704 |
-
"num_truncated_few_shots": 0
|
705 |
-
},
|
706 |
-
"harness|hendrycksTest-conceptual_physics|5": {
|
707 |
-
"hashes": {
|
708 |
-
"hash_examples": "8874ece872d2ca4c",
|
709 |
-
"hash_full_prompts": "e40d15a34640d6fa",
|
710 |
-
"hash_input_tokens": "c2ef11a87264ceed",
|
711 |
-
"hash_cont_tokens": "aa0e8bc655f2f641"
|
712 |
-
},
|
713 |
-
"truncated": 0,
|
714 |
-
"non-truncated": 940,
|
715 |
-
"padded": 940,
|
716 |
-
"non-padded": 0,
|
717 |
-
"effective_few_shots": 5.0,
|
718 |
-
"num_truncated_few_shots": 0
|
719 |
-
},
|
720 |
-
"harness|hendrycksTest-econometrics|5": {
|
721 |
-
"hashes": {
|
722 |
-
"hash_examples": "64d3623b0bfaa43f",
|
723 |
-
"hash_full_prompts": "612f340fae41338d",
|
724 |
-
"hash_input_tokens": "ecaccd912a4c3978",
|
725 |
-
"hash_cont_tokens": "a9b1f761089f6acc"
|
726 |
-
},
|
727 |
-
"truncated": 0,
|
728 |
-
"non-truncated": 456,
|
729 |
-
"padded": 456,
|
730 |
-
"non-padded": 0,
|
731 |
-
"effective_few_shots": 5.0,
|
732 |
-
"num_truncated_few_shots": 0
|
733 |
-
},
|
734 |
-
"harness|hendrycksTest-electrical_engineering|5": {
|
735 |
-
"hashes": {
|
736 |
-
"hash_examples": "e98f51780c674d7e",
|
737 |
-
"hash_full_prompts": "10275b312d812ae6",
|
738 |
-
"hash_input_tokens": "1590c84291399be8",
|
739 |
-
"hash_cont_tokens": "2425a3f084a591ef"
|
740 |
-
},
|
741 |
-
"truncated": 0,
|
742 |
-
"non-truncated": 580,
|
743 |
-
"padded": 580,
|
744 |
-
"non-padded": 0,
|
745 |
-
"effective_few_shots": 5.0,
|
746 |
-
"num_truncated_few_shots": 0
|
747 |
-
},
|
748 |
-
"harness|hendrycksTest-elementary_mathematics|5": {
|
749 |
-
"hashes": {
|
750 |
-
"hash_examples": "fc48208a5ac1c0ce",
|
751 |
-
"hash_full_prompts": "5ec274c6c82aca23",
|
752 |
-
"hash_input_tokens": "3269597f715b0da1",
|
753 |
-
"hash_cont_tokens": "eb2d5002052b5bc5"
|
754 |
-
},
|
755 |
-
"truncated": 0,
|
756 |
-
"non-truncated": 1512,
|
757 |
-
"padded": 1512,
|
758 |
-
"non-padded": 0,
|
759 |
-
"effective_few_shots": 5.0,
|
760 |
-
"num_truncated_few_shots": 0
|
761 |
-
},
|
762 |
-
"harness|hendrycksTest-formal_logic|5": {
|
763 |
-
"hashes": {
|
764 |
-
"hash_examples": "5a6525665f63ea72",
|
765 |
-
"hash_full_prompts": "07b92638c4a6b500",
|
766 |
-
"hash_input_tokens": "a2800d20f3ab8d7c",
|
767 |
-
"hash_cont_tokens": "9b30dc19c9b62f60"
|
768 |
-
},
|
769 |
-
"truncated": 0,
|
770 |
-
"non-truncated": 504,
|
771 |
-
"padded": 504,
|
772 |
-
"non-padded": 0,
|
773 |
-
"effective_few_shots": 5.0,
|
774 |
-
"num_truncated_few_shots": 0
|
775 |
-
},
|
776 |
-
"harness|hendrycksTest-global_facts|5": {
|
777 |
-
"hashes": {
|
778 |
-
"hash_examples": "371d70d743b2b89b",
|
779 |
-
"hash_full_prompts": "332fdee50a1921b4",
|
780 |
-
"hash_input_tokens": "94ed44b3772505ad",
|
781 |
-
"hash_cont_tokens": "50421e30bef398f9"
|
782 |
-
},
|
783 |
-
"truncated": 0,
|
784 |
-
"non-truncated": 400,
|
785 |
-
"padded": 400,
|
786 |
-
"non-padded": 0,
|
787 |
-
"effective_few_shots": 5.0,
|
788 |
-
"num_truncated_few_shots": 0
|
789 |
-
},
|
790 |
-
"harness|hendrycksTest-high_school_biology|5": {
|
791 |
-
"hashes": {
|
792 |
-
"hash_examples": "a79e1018b1674052",
|
793 |
-
"hash_full_prompts": "e624e26ede922561",
|
794 |
-
"hash_input_tokens": "24423acb928db768",
|
795 |
-
"hash_cont_tokens": "74217a4e2868536f"
|
796 |
-
},
|
797 |
-
"truncated": 0,
|
798 |
-
"non-truncated": 1240,
|
799 |
-
"padded": 1240,
|
800 |
-
"non-padded": 0,
|
801 |
-
"effective_few_shots": 5.0,
|
802 |
-
"num_truncated_few_shots": 0
|
803 |
-
},
|
804 |
-
"harness|hendrycksTest-high_school_chemistry|5": {
|
805 |
-
"hashes": {
|
806 |
-
"hash_examples": "44bfc25c389f0e03",
|
807 |
-
"hash_full_prompts": "0e3e5f5d9246482a",
|
808 |
-
"hash_input_tokens": "831ff35c474e5cef",
|
809 |
-
"hash_cont_tokens": "bf39544be0ebf000"
|
810 |
-
},
|
811 |
-
"truncated": 0,
|
812 |
-
"non-truncated": 812,
|
813 |
-
"padded": 812,
|
814 |
-
"non-padded": 0,
|
815 |
-
"effective_few_shots": 5.0,
|
816 |
-
"num_truncated_few_shots": 0
|
817 |
-
},
|
818 |
-
"harness|hendrycksTest-high_school_computer_science|5": {
|
819 |
-
"hashes": {
|
820 |
-
"hash_examples": "8b8cdb1084f24169",
|
821 |
-
"hash_full_prompts": "c00487e67c1813cc",
|
822 |
-
"hash_input_tokens": "8c34e0f2bda77358",
|
823 |
-
"hash_cont_tokens": "43570b3948564b64"
|
824 |
-
},
|
825 |
-
"truncated": 0,
|
826 |
-
"non-truncated": 400,
|
827 |
-
"padded": 400,
|
828 |
-
"non-padded": 0,
|
829 |
-
"effective_few_shots": 5.0,
|
830 |
-
"num_truncated_few_shots": 0
|
831 |
-
},
|
832 |
-
"harness|hendrycksTest-high_school_european_history|5": {
|
833 |
-
"hashes": {
|
834 |
-
"hash_examples": "11cd32d0ef440171",
|
835 |
-
"hash_full_prompts": "318f4513c537c6bf",
|
836 |
-
"hash_input_tokens": "f1f73dd687da18d7",
|
837 |
-
"hash_cont_tokens": "674fc454bdc5ac93"
|
838 |
-
},
|
839 |
-
"truncated": 660,
|
840 |
-
"non-truncated": 0,
|
841 |
-
"padded": 0,
|
842 |
-
"non-padded": 660,
|
843 |
-
"effective_few_shots": 5.0,
|
844 |
-
"num_truncated_few_shots": 0
|
845 |
-
},
|
846 |
-
"harness|hendrycksTest-high_school_geography|5": {
|
847 |
-
"hashes": {
|
848 |
-
"hash_examples": "b60019b9e80b642f",
|
849 |
-
"hash_full_prompts": "ee5789fcc1a81b1e",
|
850 |
-
"hash_input_tokens": "7c5547c7da5bc793",
|
851 |
-
"hash_cont_tokens": "03a5012b916274ea"
|
852 |
-
},
|
853 |
-
"truncated": 0,
|
854 |
-
"non-truncated": 792,
|
855 |
-
"padded": 792,
|
856 |
-
"non-padded": 0,
|
857 |
-
"effective_few_shots": 5.0,
|
858 |
-
"num_truncated_few_shots": 0
|
859 |
-
},
|
860 |
-
"harness|hendrycksTest-high_school_government_and_politics|5": {
|
861 |
-
"hashes": {
|
862 |
-
"hash_examples": "d221ec983d143dc3",
|
863 |
-
"hash_full_prompts": "ac42d888e1ce1155",
|
864 |
-
"hash_input_tokens": "f62991cb6a496b05",
|
865 |
-
"hash_cont_tokens": "50ab225c2f535210"
|
866 |
-
},
|
867 |
-
"truncated": 0,
|
868 |
-
"non-truncated": 772,
|
869 |
-
"padded": 772,
|
870 |
-
"non-padded": 0,
|
871 |
-
"effective_few_shots": 5.0,
|
872 |
-
"num_truncated_few_shots": 0
|
873 |
-
},
|
874 |
-
"harness|hendrycksTest-high_school_macroeconomics|5": {
|
875 |
-
"hashes": {
|
876 |
-
"hash_examples": "59c2915cacfd3fbb",
|
877 |
-
"hash_full_prompts": "c6bd9d25158abd0e",
|
878 |
-
"hash_input_tokens": "4cef2aff6e3d59ed",
|
879 |
-
"hash_cont_tokens": "c583432ad27fcfe0"
|
880 |
-
},
|
881 |
-
"truncated": 0,
|
882 |
-
"non-truncated": 1560,
|
883 |
-
"padded": 1560,
|
884 |
-
"non-padded": 0,
|
885 |
-
"effective_few_shots": 5.0,
|
886 |
-
"num_truncated_few_shots": 0
|
887 |
-
},
|
888 |
-
"harness|hendrycksTest-high_school_mathematics|5": {
|
889 |
-
"hashes": {
|
890 |
-
"hash_examples": "1f8ac897608de342",
|
891 |
-
"hash_full_prompts": "5d88f41fc2d643a8",
|
892 |
-
"hash_input_tokens": "6e2577ea4082ed2b",
|
893 |
-
"hash_cont_tokens": "1194078d4e38c984"
|
894 |
-
},
|
895 |
-
"truncated": 0,
|
896 |
-
"non-truncated": 1080,
|
897 |
-
"padded": 1080,
|
898 |
-
"non-padded": 0,
|
899 |
-
"effective_few_shots": 5.0,
|
900 |
-
"num_truncated_few_shots": 0
|
901 |
-
},
|
902 |
-
"harness|hendrycksTest-high_school_microeconomics|5": {
|
903 |
-
"hashes": {
|
904 |
-
"hash_examples": "ead6a0f2f6c83370",
|
905 |
-
"hash_full_prompts": "bfc393381298609e",
|
906 |
-
"hash_input_tokens": "c5fc9aeb1079c8e4",
|
907 |
-
"hash_cont_tokens": "f47f041de50333b9"
|
908 |
-
},
|
909 |
-
"truncated": 0,
|
910 |
-
"non-truncated": 952,
|
911 |
-
"padded": 952,
|
912 |
-
"non-padded": 0,
|
913 |
-
"effective_few_shots": 5.0,
|
914 |
-
"num_truncated_few_shots": 0
|
915 |
-
},
|
916 |
-
"harness|hendrycksTest-high_school_physics|5": {
|
917 |
-
"hashes": {
|
918 |
-
"hash_examples": "c3f2025990afec64",
|
919 |
-
"hash_full_prompts": "fc78b4997e436734",
|
920 |
-
"hash_input_tokens": "555fc385cffa84ca",
|
921 |
-
"hash_cont_tokens": "6296151cf7fee15c"
|
922 |
-
},
|
923 |
-
"truncated": 0,
|
924 |
-
"non-truncated": 604,
|
925 |
-
"padded": 604,
|
926 |
-
"non-padded": 0,
|
927 |
-
"effective_few_shots": 5.0,
|
928 |
-
"num_truncated_few_shots": 0
|
929 |
-
},
|
930 |
-
"harness|hendrycksTest-high_school_psychology|5": {
|
931 |
-
"hashes": {
|
932 |
-
"hash_examples": "21f8aab618f6d636",
|
933 |
-
"hash_full_prompts": "d5c76aa40b9dbc43",
|
934 |
-
"hash_input_tokens": "febd23cbf9973b7f",
|
935 |
-
"hash_cont_tokens": "a490d3db0ea5935a"
|
936 |
-
},
|
937 |
-
"truncated": 0,
|
938 |
-
"non-truncated": 2180,
|
939 |
-
"padded": 2180,
|
940 |
-
"non-padded": 0,
|
941 |
-
"effective_few_shots": 5.0,
|
942 |
-
"num_truncated_few_shots": 0
|
943 |
-
},
|
944 |
-
"harness|hendrycksTest-high_school_statistics|5": {
|
945 |
-
"hashes": {
|
946 |
-
"hash_examples": "2386a60a11fc5de3",
|
947 |
-
"hash_full_prompts": "4c5c8be5aafac432",
|
948 |
-
"hash_input_tokens": "424b02981230ee83",
|
949 |
-
"hash_cont_tokens": "6830ef7d0325d7ef"
|
950 |
-
},
|
951 |
-
"truncated": 0,
|
952 |
-
"non-truncated": 864,
|
953 |
-
"padded": 864,
|
954 |
-
"non-padded": 0,
|
955 |
-
"effective_few_shots": 5.0,
|
956 |
-
"num_truncated_few_shots": 0
|
957 |
-
},
|
958 |
-
"harness|hendrycksTest-high_school_us_history|5": {
|
959 |
-
"hashes": {
|
960 |
-
"hash_examples": "74961543be40f04f",
|
961 |
-
"hash_full_prompts": "5d5ca4840131ba21",
|
962 |
-
"hash_input_tokens": "50c9ff438c85a69e",
|
963 |
-
"hash_cont_tokens": "cdd0b3dc06d933e5"
|
964 |
-
},
|
965 |
-
"truncated": 816,
|
966 |
-
"non-truncated": 0,
|
967 |
-
"padded": 0,
|
968 |
-
"non-padded": 816,
|
969 |
-
"effective_few_shots": 5.0,
|
970 |
-
"num_truncated_few_shots": 0
|
971 |
-
},
|
972 |
-
"harness|hendrycksTest-high_school_world_history|5": {
|
973 |
-
"hashes": {
|
974 |
-
"hash_examples": "2ad2f6b7198b2234",
|
975 |
-
"hash_full_prompts": "11845057459afd72",
|
976 |
-
"hash_input_tokens": "054824cc474caef5",
|
977 |
-
"hash_cont_tokens": "e0203e3fc1bb0500"
|
978 |
-
},
|
979 |
-
"truncated": 8,
|
980 |
-
"non-truncated": 940,
|
981 |
-
"padded": 940,
|
982 |
-
"non-padded": 8,
|
983 |
-
"effective_few_shots": 5.0,
|
984 |
-
"num_truncated_few_shots": 0
|
985 |
-
},
|
986 |
-
"harness|hendrycksTest-human_aging|5": {
|
987 |
-
"hashes": {
|
988 |
-
"hash_examples": "1a7199dc733e779b",
|
                "hash_full_prompts": "756b9096b8eaf892",
                "hash_input_tokens": "541a75f071dcf579",
                "hash_cont_tokens": "142a4a8a1138a214"
            },
            "truncated": 0,
            "non-truncated": 892,
            "padded": 892,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-human_sexuality|5": {
            "hashes": {
                "hash_examples": "7acb8fdad97f88a6",
                "hash_full_prompts": "731a52ff15b8cfdb",
                "hash_input_tokens": "04269e5c5a257dd9",
                "hash_cont_tokens": "bc54813e809b796d"
            },
            "truncated": 0,
            "non-truncated": 524,
            "padded": 524,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-international_law|5": {
            "hashes": {
                "hash_examples": "1300bfd0dfc59114",
                "hash_full_prompts": "db2aefbff5eec996",
                "hash_input_tokens": "d93ba9d9d38e4397",
                "hash_cont_tokens": "63435df622d5437b"
            },
            "truncated": 0,
            "non-truncated": 484,
            "padded": 484,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-jurisprudence|5": {
            "hashes": {
                "hash_examples": "083b1e4904c48dc2",
                "hash_full_prompts": "0f89ee3fe03d6a21",
                "hash_input_tokens": "9eeaccd2698b4f5a",
                "hash_cont_tokens": "e3a8cd951b6e3469"
            },
            "truncated": 0,
            "non-truncated": 432,
            "padded": 432,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-logical_fallacies|5": {
            "hashes": {
                "hash_examples": "709128f9926a634c",
                "hash_full_prompts": "98a04b1f8f841069",
                "hash_input_tokens": "b4f08f544f2b7576",
                "hash_cont_tokens": "5e6ee2ff0404f23c"
            },
            "truncated": 0,
            "non-truncated": 652,
            "padded": 648,
            "non-padded": 4,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-machine_learning|5": {
            "hashes": {
                "hash_examples": "88f22a636029ae47",
                "hash_full_prompts": "2e1c8d4b1e0cc921",
                "hash_input_tokens": "900c2a51f1174b9f",
                "hash_cont_tokens": "c81919424db3b267"
            },
            "truncated": 0,
            "non-truncated": 448,
            "padded": 448,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-management|5": {
            "hashes": {
                "hash_examples": "8c8a1e07a2151dca",
                "hash_full_prompts": "f51611f514b265b0",
                "hash_input_tokens": "6b36efb4689c6eca",
                "hash_cont_tokens": "a01d6d39a83c4597"
            },
            "truncated": 0,
            "non-truncated": 412,
            "padded": 412,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-marketing|5": {
            "hashes": {
                "hash_examples": "2668953431f91e96",
                "hash_full_prompts": "77562bef997c7650",
                "hash_input_tokens": "2aaac78a0cfed47a",
                "hash_cont_tokens": "6aeaed4d823c98aa"
            },
            "truncated": 0,
            "non-truncated": 936,
            "padded": 936,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-medical_genetics|5": {
            "hashes": {
                "hash_examples": "9c2dda34a2ea4fd2",
                "hash_full_prompts": "202139046daa118f",
                "hash_input_tokens": "886ca823b41c094a",
                "hash_cont_tokens": "50421e30bef398f9"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-miscellaneous|5": {
            "hashes": {
                "hash_examples": "41adb694024809c2",
                "hash_full_prompts": "bffec9fc237bcf93",
                "hash_input_tokens": "72fd71de7675e7d0",
                "hash_cont_tokens": "9b0ab02a64603081"
            },
            "truncated": 0,
            "non-truncated": 3132,
            "padded": 3132,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-moral_disputes|5": {
            "hashes": {
                "hash_examples": "3171c13ba3c594c4",
                "hash_full_prompts": "170831fc36f1d59e",
                "hash_input_tokens": "f3ca0dd8e7a1eb09",
                "hash_cont_tokens": "3b8bbe9108e55ce9"
            },
            "truncated": 0,
            "non-truncated": 1384,
            "padded": 1354,
            "non-padded": 30,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-moral_scenarios|5": {
            "hashes": {
                "hash_examples": "9873e077e83e0546",
                "hash_full_prompts": "08f4ceba3131a068",
                "hash_input_tokens": "3e793631e951f23c",
                "hash_cont_tokens": "2eae753a177d5460"
            },
            "truncated": 0,
            "non-truncated": 3580,
            "padded": 3580,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-nutrition|5": {
            "hashes": {
                "hash_examples": "7db1d8142ec14323",
                "hash_full_prompts": "4c0e68e3586cb453",
                "hash_input_tokens": "59753c2144ea93af",
                "hash_cont_tokens": "29771089bd3c65c6"
            },
            "truncated": 0,
            "non-truncated": 1224,
            "padded": 1224,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-philosophy|5": {
            "hashes": {
                "hash_examples": "9b455b7d72811cc8",
                "hash_full_prompts": "e467f822d8a0d3ff",
                "hash_input_tokens": "bd8d3dbed15a8c34",
                "hash_cont_tokens": "9f6ff69d23a48783"
            },
            "truncated": 0,
            "non-truncated": 1244,
            "padded": 1244,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-prehistory|5": {
            "hashes": {
                "hash_examples": "8be90d0f538f1560",
                "hash_full_prompts": "152187949bcd0921",
                "hash_input_tokens": "3573cd87facbb7c5",
                "hash_cont_tokens": "a789a13af22308bf"
            },
            "truncated": 0,
            "non-truncated": 1296,
            "padded": 1296,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_accounting|5": {
            "hashes": {
                "hash_examples": "8d377597916cd07e",
                "hash_full_prompts": "0eb7345d6144ee0d",
                "hash_input_tokens": "17e721bc1a7cbb47",
                "hash_cont_tokens": "5129a9cfb30c5239"
            },
            "truncated": 0,
            "non-truncated": 1128,
            "padded": 1128,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_law|5": {
            "hashes": {
                "hash_examples": "cd9dbc52b3c932d6",
                "hash_full_prompts": "36ac764272bfb182",
                "hash_input_tokens": "9178e10bd0763ec4",
                "hash_cont_tokens": "2e590029ef41fbcd"
            },
            "truncated": 604,
            "non-truncated": 5532,
            "padded": 5524,
            "non-padded": 612,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_medicine|5": {
            "hashes": {
                "hash_examples": "b20e4e816c1e383e",
                "hash_full_prompts": "7b8d69ea2acaf2f7",
                "hash_input_tokens": "f5a22012a54f70ea",
                "hash_cont_tokens": "cd82e108370cece8"
            },
            "truncated": 0,
            "non-truncated": 1088,
            "padded": 1088,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-professional_psychology|5": {
            "hashes": {
                "hash_examples": "d45b73b22f9cc039",
                "hash_full_prompts": "fe8937e9ffc99771",
                "hash_input_tokens": "0dfb73a8eb3f692c",
                "hash_cont_tokens": "61ef0c8a87f9c92d"
            },
            "truncated": 0,
            "non-truncated": 2448,
            "padded": 2448,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-public_relations|5": {
            "hashes": {
                "hash_examples": "0d25072e1761652a",
                "hash_full_prompts": "f9adc39cfa9f42ba",
                "hash_input_tokens": "1710c6ba4c9f3cbd",
                "hash_cont_tokens": "568f585a259965c1"
            },
            "truncated": 0,
            "non-truncated": 440,
            "padded": 440,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-security_studies|5": {
            "hashes": {
                "hash_examples": "62bb8197e63d60d4",
                "hash_full_prompts": "869c9c3ae196b7c3",
                "hash_input_tokens": "d49711415961ced7",
                "hash_cont_tokens": "d70cfe096d4fb7bd"
            },
            "truncated": 0,
            "non-truncated": 980,
            "padded": 980,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-sociology|5": {
            "hashes": {
                "hash_examples": "e7959df87dea8672",
                "hash_full_prompts": "1a1fc00e17b3a52a",
                "hash_input_tokens": "828999f7624cbe7e",
                "hash_cont_tokens": "c3a3bdfd177eed5b"
            },
            "truncated": 0,
            "non-truncated": 804,
            "padded": 804,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-us_foreign_policy|5": {
            "hashes": {
                "hash_examples": "4a56a01ddca44dca",
                "hash_full_prompts": "0c7a7081c71c07b6",
                "hash_input_tokens": "42054621e718dbee",
                "hash_cont_tokens": "2568d0e8e36fa959"
            },
            "truncated": 0,
            "non-truncated": 400,
            "padded": 400,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-virology|5": {
            "hashes": {
                "hash_examples": "451cc86a8c4f4fe9",
                "hash_full_prompts": "01e95325d8b738e4",
                "hash_input_tokens": "6c4f0aa4dc859c04",
                "hash_cont_tokens": "c178cccd753d9bc5"
            },
            "truncated": 0,
            "non-truncated": 664,
            "padded": 664,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|hendrycksTest-world_religions|5": {
            "hashes": {
                "hash_examples": "3b29cfaf1a81c379",
                "hash_full_prompts": "e0d79a15083dfdff",
                "hash_input_tokens": "6c75d44e092ff24f",
                "hash_cont_tokens": "0a3a3ea5ef49d19c"
            },
            "truncated": 0,
            "non-truncated": 684,
            "padded": 684,
            "non-padded": 0,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|truthfulqa:mc|0": {
            "hashes": {
                "hash_examples": "23176c0531c7b867",
                "hash_full_prompts": "36a6d90e75d92d4a",
                "hash_input_tokens": "2738d7ed7075faa7",
                "hash_cont_tokens": "6d1691881e252df0"
            },
            "truncated": 0,
            "non-truncated": 9996,
            "padded": 9996,
            "non-padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "d84d18e9a963753d",
            "hash_full_prompts": "12b540783521a8e6",
            "hash_input_tokens": "6fecf578c508db6a",
            "hash_cont_tokens": "f4b7b7f3a2788768"
        },
        "total_evaluation_time_secondes": "25789.163803100586",
        "truncated": 2088,
        "non-truncated": 108931,
        "padded": 108834,
        "non-padded": 2185,
        "num_truncated_few_shots": 0
    }
}
upstage/llama-65b-instruct/results_2023-10-17T01-44-05.835561.json
DELETED
@@ -1,107 +0,0 @@
{
    "config_general": {
        "model_name": "upstage/llama-65b-instruct",
        "model_sha": "f70a9865cb0a1ac1157ad928b3b428dd85d52946",
        "model_size": "121.68 GB",
        "model_dtype": "torch.float16",
        "lighteval_sha": "0f318ecf002208468154899217b3ba7c6ae09374",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": ""
    },
    "results": {
        "harness|drop|3": {
            "em": 0.454383389261745,
            "em_stderr": 0.005099113352549085,
            "f1": 0.5468970218120836,
            "f1_stderr": 0.004699295426287538
        },
        "harness|gsm8k|5": {
            "acc": 0.2623199393479909,
            "acc_stderr": 0.012116912419925702
        },
        "harness|winogrande|5": {
            "acc": 0.8105761641673244,
            "acc_stderr": 0.011012790432989247
        },
        "all": {
            "em": 0.454383389261745,
            "em_stderr": 0.005099113352549085,
            "f1": 0.5468970218120836,
            "f1_stderr": 0.004699295426287538,
            "acc": 0.5364480517576576,
            "acc_stderr": 0.011564851426457474
        }
    },
    "versions": {
        "harness|drop|3": 1,
        "harness|gsm8k|5": 0,
        "harness|winogrande|5": 0,
        "all": 0
    },
    "config_tasks": {
        "harness|drop": "LM Harness task",
        "harness|gsm8k": "LM Harness task",
        "harness|winogrande": "LM Harness task"
    },
    "summary_tasks": {
        "harness|drop|3": {
            "hashes": {
                "hash_examples": "1d27416e8324e9a3",
                "hash_full_prompts": "a5513ff9a741b385",
                "hash_input_tokens": "61b608e0b5ceed76",
                "hash_cont_tokens": "fbda1bf84bd869c2"
            },
            "truncated": 1263,
            "non-truncated": 8273,
            "padded": 0,
            "non-padded": 9536,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|gsm8k|5": {
            "hashes": {
                "hash_examples": "4c0843a5d99bcfdc",
                "hash_full_prompts": "41d55e83abc0e02d",
                "hash_input_tokens": "bda342e47b5099b2",
                "hash_cont_tokens": "401cd6efe39a68e1"
            },
            "truncated": 0,
            "non-truncated": 1319,
            "padded": 0,
            "non-padded": 1319,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|winogrande|5": {
            "hashes": {
                "hash_examples": "aada0a176fd81218",
                "hash_full_prompts": "c8655cbd12de8409",
                "hash_input_tokens": "c0bedf98cb040854",
                "hash_cont_tokens": "f08975ad6f2d5864"
            },
            "truncated": 0,
            "non-truncated": 2534,
            "padded": 2432,
            "non-padded": 102,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "9b4d8993161e637d",
            "hash_full_prompts": "08215e527b7e60a5",
            "hash_input_tokens": "80afe720f936f8d2",
            "hash_cont_tokens": "4a92d71431dc01ef"
        },
        "total_evaluation_time_secondes": "122247.27566027641",
        "truncated": 1263,
        "non-truncated": 12126,
        "padded": 2432,
        "non-padded": 10957,
        "num_truncated_few_shots": 0
    }
}
upstage/llama-65b-instruct/results_2023-10-24T19-27-31.642045.json
DELETED
@@ -1,107 +0,0 @@
{
    "config_general": {
        "model_name": "upstage/llama-65b-instruct",
        "model_sha": "f70a9865cb0a1ac1157ad928b3b428dd85d52946",
        "model_size": "121.68 GB",
        "model_dtype": "torch.float16",
        "lighteval_sha": "0f318ecf002208468154899217b3ba7c6ae09374",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": ""
    },
    "results": {
        "harness|drop|3": {
            "em": 0.454383389261745,
            "em_stderr": 0.005099113352549085,
            "f1": 0.5468970218120836,
            "f1_stderr": 0.004699295426287538
        },
        "harness|gsm8k|5": {
            "acc": 0.2623199393479909,
            "acc_stderr": 0.012116912419925702
        },
        "harness|winogrande|5": {
            "acc": 0.8105761641673244,
            "acc_stderr": 0.011012790432989247
        },
        "all": {
            "em": 0.454383389261745,
            "em_stderr": 0.005099113352549085,
            "f1": 0.5468970218120836,
            "f1_stderr": 0.004699295426287538,
            "acc": 0.5364480517576576,
            "acc_stderr": 0.011564851426457474
        }
    },
    "versions": {
        "harness|drop|3": 1,
        "harness|gsm8k|5": 0,
        "harness|winogrande|5": 0,
        "all": 0
    },
    "config_tasks": {
        "harness|drop": "LM Harness task",
        "harness|gsm8k": "LM Harness task",
        "harness|winogrande": "LM Harness task"
    },
    "summary_tasks": {
        "harness|drop|3": {
            "hashes": {
                "hash_examples": "1d27416e8324e9a3",
                "hash_full_prompts": "a5513ff9a741b385",
                "hash_input_tokens": "61b608e0b5ceed76",
                "hash_cont_tokens": "fbda1bf84bd869c2"
            },
            "truncated": 1263,
            "non-truncated": 8273,
            "padded": 0,
            "non-padded": 9536,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|gsm8k|5": {
            "hashes": {
                "hash_examples": "4c0843a5d99bcfdc",
                "hash_full_prompts": "41d55e83abc0e02d",
                "hash_input_tokens": "bda342e47b5099b2",
                "hash_cont_tokens": "401cd6efe39a68e1"
            },
            "truncated": 0,
            "non-truncated": 1319,
            "padded": 0,
            "non-padded": 1319,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        },
        "harness|winogrande|5": {
            "hashes": {
                "hash_examples": "aada0a176fd81218",
                "hash_full_prompts": "c8655cbd12de8409",
                "hash_input_tokens": "c0bedf98cb040854",
                "hash_cont_tokens": "f08975ad6f2d5864"
            },
            "truncated": 0,
            "non-truncated": 2534,
            "padded": 2432,
            "non-padded": 102,
            "effective_few_shots": 5.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "9b4d8993161e637d",
            "hash_full_prompts": "08215e527b7e60a5",
            "hash_input_tokens": "80afe720f936f8d2",
            "hash_cont_tokens": "4a92d71431dc01ef"
        },
        "total_evaluation_time_secondes": "123205.54400348663",
        "truncated": 1263,
        "non-truncated": 12126,
        "padded": 2432,
        "non-padded": 10957,
        "num_truncated_few_shots": 0
    }
}