picocreator committed
Commit b2a80b7 · 1 Parent(s): aa53828

update results / drop old misplaced results

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. lm-eval-output/RedPajama-INCITE-7B-Base/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  2. lm-eval-output/RedPajama-INCITE-7B-Base/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -252
  3. lm-eval-output/RedPajama-INCITE-7B-Base/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +0 -3
  4. lm-eval-output/RedPajama-INCITE-7B-Base/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  5. lm-eval-output/RedPajama-INCITE-7B-Base/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -283
  6. lm-eval-output/RedPajama-INCITE-7B-Base/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +0 -3
  7. lm-eval-output/RedPajama-INCITE-7B-Base/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  8. lm-eval-output/RedPajama-INCITE-7B-Base/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -390
  9. lm-eval-output/RedPajama-INCITE-7B-Base/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +0 -3
  10. lm-eval-output/RedPajama-INCITE-7B-Base/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  11. lm-eval-output/RedPajama-INCITE-7B-Base/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -548
  12. lm-eval-output/RedPajama-INCITE-7B-Base/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +0 -3
  13. lm-eval-output/RedPajama-INCITE-7B-Base/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  14. lm-eval-output/RedPajama-INCITE-7B-Base/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -423
  15. lm-eval-output/RedPajama-INCITE-7B-Base/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +0 -3
  16. lm-eval-output/RedPajama-INCITE-7B-Base/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  17. lm-eval-output/RedPajama-INCITE-7B-Base/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -248
  18. lm-eval-output/RedPajama-INCITE-7B-Base/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +0 -3
  19. lm-eval-output/allenai/OLMo-7B/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  20. lm-eval-output/allenai/OLMo-7B/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +21 -19
  21. lm-eval-output/allenai/OLMo-7B/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  22. lm-eval-output/allenai/OLMo-7B/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  23. lm-eval-output/allenai/OLMo-7B/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +15 -13
  24. lm-eval-output/allenai/OLMo-7B/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  25. lm-eval-output/allenai/OLMo-7B/arithmetic/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  26. lm-eval-output/allenai/OLMo-7B/arithmetic/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +13 -11
  27. lm-eval-output/allenai/OLMo-7B/arithmetic/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  28. lm-eval-output/allenai/OLMo-7B/arithmetic__/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  29. lm-eval-output/allenai/OLMo-7B/arithmetic__/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +9 -7
  30. lm-eval-output/allenai/OLMo-7B/arithmetic__/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  31. lm-eval-output/allenai/OLMo-7B/asdiv/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  32. lm-eval-output/allenai/OLMo-7B/asdiv/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +7 -5
  33. lm-eval-output/allenai/OLMo-7B/asdiv/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  34. lm-eval-output/allenai/OLMo-7B/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  35. lm-eval-output/allenai/OLMo-7B/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +126 -124
  36. lm-eval-output/allenai/OLMo-7B/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  37. lm-eval-output/allenai/OLMo-7B/boolq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  38. lm-eval-output/allenai/OLMo-7B/boolq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -60
  39. lm-eval-output/allenai/OLMo-7B/boolq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  40. lm-eval-output/allenai/OLMo-7B/cb/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  41. lm-eval-output/allenai/OLMo-7B/cb/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +5 -3
  42. lm-eval-output/allenai/OLMo-7B/cb/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  43. lm-eval-output/allenai/OLMo-7B/ceval-valid/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  44. lm-eval-output/allenai/OLMo-7B/ceval-valid/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -2588
  45. lm-eval-output/allenai/OLMo-7B/ceval-valid/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  46. lm-eval-output/allenai/OLMo-7B/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  47. lm-eval-output/allenai/OLMo-7B/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +247 -245
  48. lm-eval-output/allenai/OLMo-7B/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  49. lm-eval-output/allenai/OLMo-7B/cola/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz +0 -3
  50. lm-eval-output/allenai/OLMo-7B/cola/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +7 -5
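Every results.json in this repository follows the same schema, visible in the diffs below: a "results" map keyed by task (one group-level row plus per-language subtasks), then "groups", "configs", "versions", "n-shot", and the run-level "config". A minimal Python sketch for pulling the headline metrics out of one of the files listed above, assuming a local checkout of the repository:

    import json

    # Path taken from entry 20 in the file list above (assumes a local checkout).
    path = ("lm-eval-output/allenai/OLMo-7B/ai2_arc/"
            "dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json")

    with open(path) as f:
        report = json.load(f)

    # Group rows (e.g. "lambada_multilingual") carry the same "acc,none" /
    # "acc_stderr,none" keys as their per-language subtask rows.
    for task, metrics in report["results"].items():
        if "acc,none" in metrics:
            print(f'{task}: acc={metrics["acc,none"]:.4f} ± {metrics["acc_stderr,none"]:.4f}')
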
lm-eval-output/RedPajama-INCITE-7B-Base/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:310cb302ba58221afa5b4b77eded59124462d2e404e554a396360e14e1432d64
- size 5210477
 
 
 
 
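The deleted result-jsonl.tar.gz and taskrun.log entries above and below are Git LFS pointer files rather than the payloads themselves: each three-line body names the LFS spec version, the sha256 oid of the real content, and its size in bytes. A small sketch of parsing one of these pointers (the pointer text is copied verbatim from the diff above):

    def parse_lfs_pointer(text: str) -> dict:
        # Split each "key value" line of a Git LFS pointer file.
        fields = {}
        for line in text.strip().splitlines():
            key, _, value = line.partition(" ")
            fields[key] = value
        return fields

    pointer = (
        "version https://git-lfs.github.com/spec/v1\n"
        "oid sha256:310cb302ba58221afa5b4b77eded59124462d2e404e554a396360e14e1432d64\n"
        "size 5210477"
    )
    info = parse_lfs_pointer(pointer)
    print(info["oid"], int(info["size"]))  # content hash and byte size of the real archive
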
lm-eval-output/RedPajama-INCITE-7B-Base/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,252 +0,0 @@
- {
- "results": {
- "lambada_multilingual": {
- "perplexity,none": 55.75634065211411,
- "perplexity_stderr,none": 17.112832544800874,
- "acc,none": 0.43578497962352025,
- "acc_stderr,none": 0.07783182786033929,
- "alias": "lambada_multilingual"
- },
- "lambada_openai_mt_de": {
- "perplexity,none": 87.03760418597274,
- "perplexity_stderr,none": 5.220743099786189,
- "acc,none": 0.318067145352222,
- "acc_stderr,none": 0.006488469772173893,
- "alias": " - lambada_openai_mt_de"
- },
- "lambada_openai_mt_en": {
- "perplexity,none": 4.00575099472505,
- "perplexity_stderr,none": 0.08559465754530345,
- "acc,none": 0.7003687172520862,
- "acc_stderr,none": 0.006382179569794074,
- "alias": " - lambada_openai_mt_en"
- },
- "lambada_openai_mt_es": {
- "perplexity,none": 74.50603551865778,
- "perplexity_stderr,none": 4.146635362251485,
- "acc,none": 0.3483407723656123,
- "acc_stderr,none": 0.006637805195772818,
- "alias": " - lambada_openai_mt_es"
- },
- "lambada_openai_mt_fr": {
- "perplexity,none": 47.60819762333609,
- "perplexity_stderr,none": 2.6897251543883476,
- "acc,none": 0.42227828449446925,
- "acc_stderr,none": 0.006881304773376873,
- "alias": " - lambada_openai_mt_fr"
- },
- "lambada_openai_mt_it": {
- "perplexity,none": 65.62411493787893,
- "perplexity_stderr,none": 3.9555857520848434,
- "acc,none": 0.3898699786532117,
- "acc_stderr,none": 0.006794901529888746,
- "alias": " - lambada_openai_mt_it"
- }
- },
- "groups": {
- "lambada_multilingual": {
- "perplexity,none": 55.75634065211411,
- "perplexity_stderr,none": 17.112832544800874,
- "acc,none": 0.43578497962352025,
- "acc_stderr,none": 0.07783182786033929,
- "alias": "lambada_multilingual"
- }
- },
- "configs": {
- "lambada_openai_mt_de": {
- "task": "lambada_openai_mt_de",
- "group": [
- "lambada_multilingual"
- ],
- "dataset_path": "EleutherAI/lambada_openai",
- "dataset_name": "de",
- "test_split": "test",
- "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
- "doc_to_target": "{{' '+text.split(' ')[-1]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "perplexity",
- "aggregation": "perplexity",
- "higher_is_better": false
- },
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "loglikelihood",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{text}}",
- "metadata": {
- "version": 1.0
- }
- },
- "lambada_openai_mt_en": {
- "task": "lambada_openai_mt_en",
- "group": [
- "lambada_multilingual"
- ],
- "dataset_path": "EleutherAI/lambada_openai",
- "dataset_name": "en",
- "test_split": "test",
- "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
- "doc_to_target": "{{' '+text.split(' ')[-1]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "perplexity",
- "aggregation": "perplexity",
- "higher_is_better": false
- },
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "loglikelihood",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{text}}",
- "metadata": {
- "version": 1.0
- }
- },
- "lambada_openai_mt_es": {
- "task": "lambada_openai_mt_es",
- "group": [
- "lambada_multilingual"
- ],
- "dataset_path": "EleutherAI/lambada_openai",
- "dataset_name": "es",
- "test_split": "test",
- "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
- "doc_to_target": "{{' '+text.split(' ')[-1]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "perplexity",
- "aggregation": "perplexity",
- "higher_is_better": false
- },
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "loglikelihood",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{text}}",
- "metadata": {
- "version": 1.0
- }
- },
- "lambada_openai_mt_fr": {
- "task": "lambada_openai_mt_fr",
- "group": [
- "lambada_multilingual"
- ],
- "dataset_path": "EleutherAI/lambada_openai",
- "dataset_name": "fr",
- "test_split": "test",
- "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
- "doc_to_target": "{{' '+text.split(' ')[-1]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "perplexity",
- "aggregation": "perplexity",
- "higher_is_better": false
- },
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "loglikelihood",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{text}}",
- "metadata": {
- "version": 1.0
- }
- },
- "lambada_openai_mt_it": {
- "task": "lambada_openai_mt_it",
- "group": [
- "lambada_multilingual"
- ],
- "dataset_path": "EleutherAI/lambada_openai",
- "dataset_name": "it",
- "test_split": "test",
- "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
- "doc_to_target": "{{' '+text.split(' ')[-1]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "perplexity",
- "aggregation": "perplexity",
- "higher_is_better": false
- },
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "loglikelihood",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{text}}",
- "metadata": {
- "version": 1.0
- }
- }
- },
- "versions": {
- "lambada_multilingual": "N/A",
- "lambada_openai_mt_de": 1.0,
- "lambada_openai_mt_en": 1.0,
- "lambada_openai_mt_es": 1.0,
- "lambada_openai_mt_fr": 1.0,
- "lambada_openai_mt_it": 1.0
- },
- "n-shot": {
- "lambada_multilingual": 0,
- "lambada_openai_mt_de": 0,
- "lambada_openai_mt_en": 0,
- "lambada_openai_mt_es": 0,
- "lambada_openai_mt_fr": 0,
- "lambada_openai_mt_it": 0
- },
- "config": {
- "model": "hf",
- "model_args": "pretrained=togethercomputer/RedPajama-INCITE-7B-Base,dtype=bfloat16,trust_remote_code=True",
- "batch_size": "auto",
- "batch_sizes": [
- 16
- ],
- "device": null,
- "use_cache": null,
- "limit": null,
- "bootstrap_iters": 100000,
- "gen_kwargs": null
- },
- "git_hash": "2c0a875"
- }
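
The lambada configs above build each request with two Jinja templates: doc_to_text drops the final whitespace-separated word of the passage, and doc_to_target is that word with a leading space; the model is then scored on the loglikelihood of the target ("output_type": "loglikelihood"). An equivalent sketch of the same split in plain Python (illustrative only; the harness renders the templates itself):

    def lambada_prompt_and_target(text: str) -> tuple[str, str]:
        # Mirrors "{{text.split(' ')[:-1]|join(' ')}}" and "{{' '+text.split(' ')[-1]}}".
        words = text.split(" ")
        return " ".join(words[:-1]), " " + words[-1]

    prompt, target = lambada_prompt_and_target("the cat sat on the mat")
    assert prompt == "the cat sat on the"
    assert target == " mat"
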
lm-eval-output/RedPajama-INCITE-7B-Base/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:82e89447db6627b75a9e38192365c5c94b7c889a1bd3fbc15efae6c880c35cb8
- size 189167
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:04f23a0163c17bec0e2b07f28182ec884262d5d00b34ed2e5bb793d22657a6c6
- size 2133469
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,283 +0,0 @@
- {
- "results": {
- "pawsx": {
- "acc,none": 0.4830714285714286,
- "acc_stderr,none": 0.032587798080603594,
- "alias": "pawsx"
- },
- "paws_de": {
- "acc,none": 0.4565,
- "acc_stderr,none": 0.011140733053371408,
- "alias": " - paws_de"
- },
- "paws_en": {
- "acc,none": 0.445,
- "acc_stderr,none": 0.011115272135099207,
- "alias": " - paws_en"
- },
- "paws_es": {
- "acc,none": 0.412,
- "acc_stderr,none": 0.011008569130325172,
- "alias": " - paws_es"
- },
- "paws_fr": {
- "acc,none": 0.531,
- "acc_stderr,none": 0.011161621338114474,
- "alias": " - paws_fr"
- },
- "paws_ja": {
- "acc,none": 0.4825,
- "acc_stderr,none": 0.01117628425125418,
- "alias": " - paws_ja"
- },
- "paws_ko": {
- "acc,none": 0.524,
- "acc_stderr,none": 0.011170245619215438,
- "alias": " - paws_ko"
- },
- "paws_zh": {
- "acc,none": 0.5305,
- "acc_stderr,none": 0.011162310405413182,
- "alias": " - paws_zh"
- }
- },
- "groups": {
- "pawsx": {
- "acc,none": 0.4830714285714286,
- "acc_stderr,none": 0.032587798080603594,
- "alias": "pawsx"
- }
- },
- "configs": {
- "paws_de": {
- "task": "paws_de",
- "group": "pawsx",
- "dataset_path": "paws-x",
- "dataset_name": "de",
- "training_split": "train",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[sentence1+\", richtig? Ja, \"+sentence2, sentence1+\", richtig? Nein, \"+sentence2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 0.0
- }
- },
- "paws_en": {
- "task": "paws_en",
- "group": "pawsx",
- "dataset_path": "paws-x",
- "dataset_name": "en",
- "training_split": "train",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[sentence1+\", right? Yes, \"+sentence2, sentence1+\", right? No, \"+sentence2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 0.0
- }
- },
- "paws_es": {
- "task": "paws_es",
- "group": "pawsx",
- "dataset_path": "paws-x",
- "dataset_name": "es",
- "training_split": "train",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[sentence1+\", verdad? Sí, \"+sentence2, sentence1+\", verdad? No, \"+sentence2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 0.0
- }
- },
- "paws_fr": {
- "task": "paws_fr",
- "group": "pawsx",
- "dataset_path": "paws-x",
- "dataset_name": "fr",
- "training_split": "train",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[sentence1+\", n'est-ce pas? Oui, \"+sentence2, sentence1+\", n'est-ce pas? No, \"+sentence2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 0.0
- }
- },
- "paws_ja": {
- "task": "paws_ja",
- "group": "pawsx",
- "dataset_path": "paws-x",
- "dataset_name": "ja",
- "training_split": "train",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[sentence1+\", ですね? はい, \"+sentence2, sentence1+\", ですね? いいえ, \"+sentence2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 0.0
- }
- },
- "paws_ko": {
- "task": "paws_ko",
- "group": "pawsx",
- "dataset_path": "paws-x",
- "dataset_name": "ko",
- "training_split": "train",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[sentence1+\", 맞죠? 예, \"+sentence2, sentence1+\", 맞죠? 아니요, \"+sentence2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 0.0
- }
- },
- "paws_zh": {
- "task": "paws_zh",
- "group": "pawsx",
- "dataset_path": "paws-x",
- "dataset_name": "zh",
- "training_split": "train",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[sentence1+\", 对吧? 是, \"+sentence2, sentence1+\", 对吧? 不是, \"+sentence2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 0.0
- }
- }
- },
- "versions": {
- "paws_de": 0.0,
- "paws_en": 0.0,
- "paws_es": 0.0,
- "paws_fr": 0.0,
- "paws_ja": 0.0,
- "paws_ko": 0.0,
- "paws_zh": 0.0,
- "pawsx": "N/A"
- },
- "n-shot": {
- "paws_de": 0,
- "paws_en": 0,
- "paws_es": 0,
- "paws_fr": 0,
- "paws_ja": 0,
- "paws_ko": 0,
- "paws_zh": 0,
- "pawsx": 0
- },
- "config": {
- "model": "hf",
- "model_args": "pretrained=togethercomputer/RedPajama-INCITE-7B-Base,dtype=bfloat16,trust_remote_code=True",
- "batch_size": "auto",
- "batch_sizes": [
- 16
- ],
- "device": null,
- "use_cache": null,
- "limit": null,
- "bootstrap_iters": 100000,
- "gen_kwargs": null
- },
- "git_hash": "2c0a875"
- }
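
The pawsx tasks above are "output_type": "multiple_choice": doc_to_choice renders two candidate completions per document, a "Yes" reading and a "No" reading in the target language, and the document counts as correct when the candidate with the higher loglikelihood sits at the index given by doc_to_target ("label"). A sketch of the English template's expansion, with a hypothetical document whose field names match the paws-x columns used above:

    def paws_en_choices(doc: dict) -> list[str]:
        # Mirrors the paws_en doc_to_choice template shown above.
        s1, s2 = doc["sentence1"], doc["sentence2"]
        return [f"{s1}, right? Yes, {s2}", f"{s1}, right? No, {s2}"]

    # Hypothetical document for illustration only.
    doc = {"sentence1": "The car is red.", "sentence2": "The vehicle is red.", "label": 0}
    for i, choice in enumerate(paws_en_choices(doc)):
        print(i, choice)
    # The harness scores each rendered choice's loglikelihood and compares the
    # argmax index against doc["label"].
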
lm-eval-output/RedPajama-INCITE-7B-Base/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:edb8d10109c30a134c04094c23d91f0598a09cc74c21c12cc8180f20db55a520
- size 170261
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:87474ecc21f8109f1b039f37e49d45a577e1a9d22ca1cda05cd8fdd45512d25b
- size 534066
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,390 +0,0 @@
- {
- "results": {
- "xcopa": {
- "acc,none": 0.5254545454545455,
- "acc_stderr,none": 0.036407165846333675,
- "alias": "xcopa"
- },
- "xcopa_et": {
- "acc,none": 0.492,
- "acc_stderr,none": 0.022380208834928035,
- "alias": " - xcopa_et"
- },
- "xcopa_ht": {
- "acc,none": 0.502,
- "acc_stderr,none": 0.022382894986483524,
- "alias": " - xcopa_ht"
- },
- "xcopa_id": {
- "acc,none": 0.54,
- "acc_stderr,none": 0.02231133324528966,
- "alias": " - xcopa_id"
- },
- "xcopa_it": {
- "acc,none": 0.604,
- "acc_stderr,none": 0.021893529941665813,
- "alias": " - xcopa_it"
- },
- "xcopa_qu": {
- "acc,none": 0.478,
- "acc_stderr,none": 0.02236139673920788,
- "alias": " - xcopa_qu"
- },
- "xcopa_sw": {
- "acc,none": 0.522,
- "acc_stderr,none": 0.02236139673920788,
- "alias": " - xcopa_sw"
- },
- "xcopa_ta": {
- "acc,none": 0.546,
- "acc_stderr,none": 0.02228814759117695,
- "alias": " - xcopa_ta"
- },
- "xcopa_th": {
- "acc,none": 0.532,
- "acc_stderr,none": 0.022337186479044292,
- "alias": " - xcopa_th"
- },
- "xcopa_tr": {
- "acc,none": 0.514,
- "acc_stderr,none": 0.02237429816635319,
- "alias": " - xcopa_tr"
- },
- "xcopa_vi": {
- "acc,none": 0.494,
- "acc_stderr,none": 0.022381462412439324,
- "alias": " - xcopa_vi"
- },
- "xcopa_zh": {
- "acc,none": 0.556,
- "acc_stderr,none": 0.02224224437573102,
- "alias": " - xcopa_zh"
- }
- },
- "groups": {
- "xcopa": {
- "acc,none": 0.5254545454545455,
- "acc_stderr,none": 0.036407165846333675,
- "alias": "xcopa"
- }
- },
- "configs": {
- "xcopa_et": {
- "task": "xcopa_et",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "et",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c4837e0>, connector={'cause': 'sest', 'effect': 'seetõttu'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_ht": {
- "task": "xcopa_ht",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "ht",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c3d36a0>, connector={'cause': 'poukisa', 'effect': 'donk sa'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_id": {
- "task": "xcopa_id",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "id",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c4825c0>, connector={'cause': 'karena', 'effect': 'maka'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_it": {
- "task": "xcopa_it",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "it",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c3d2ac0>, connector={'cause': 'perché', 'effect': 'quindi'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_qu": {
- "task": "xcopa_qu",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "qu",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c3579c0>, connector={'cause': 'imataq', 'effect': 'chaymi'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_sw": {
- "task": "xcopa_sw",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "sw",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c3d2e80>, connector={'cause': 'kwa sababu', 'effect': 'kwa hiyo'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_ta": {
- "task": "xcopa_ta",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "ta",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c3d2d40>, connector={'cause': 'காரணமாக', 'effect': 'எனவே'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_th": {
- "task": "xcopa_th",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "th",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c4816c0>, connector={'cause': 'เพราะ', 'effect': 'ดังนั้น'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_tr": {
- "task": "xcopa_tr",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "tr",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a7c357b00>, connector={'cause': 'çünkü', 'effect': 'bu yüzden'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_vi": {
- "task": "xcopa_vi",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "vi",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a90202f20>, connector={'cause': 'bởi vì', 'effect': 'vì vậy'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xcopa_zh": {
- "task": "xcopa_zh",
- "group": "xcopa",
- "dataset_path": "xcopa",
- "dataset_name": "zh",
- "validation_split": "validation",
- "test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at 0x7f1a9edd3380>, connector={'cause': '因为', 'effect': '所以'})",
- "doc_to_target": "label",
- "doc_to_choice": "def doc_to_choice(doc):\n    return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- }
- },
- "versions": {
- "xcopa": "N/A",
- "xcopa_et": 1.0,
- "xcopa_ht": 1.0,
- "xcopa_id": 1.0,
- "xcopa_it": 1.0,
- "xcopa_qu": 1.0,
- "xcopa_sw": 1.0,
- "xcopa_ta": 1.0,
- "xcopa_th": 1.0,
- "xcopa_tr": 1.0,
- "xcopa_vi": 1.0,
- "xcopa_zh": 1.0
- },
- "n-shot": {
- "xcopa": 0,
- "xcopa_et": 0,
- "xcopa_ht": 0,
- "xcopa_id": 0,
- "xcopa_it": 0,
- "xcopa_qu": 0,
- "xcopa_sw": 0,
- "xcopa_ta": 0,
- "xcopa_th": 0,
- "xcopa_tr": 0,
- "xcopa_vi": 0,
- "xcopa_zh": 0
- },
- "config": {
- "model": "hf",
- "model_args": "pretrained=togethercomputer/RedPajama-INCITE-7B-Base,dtype=bfloat16,trust_remote_code=True",
- "batch_size": "auto",
- "batch_sizes": [
- 16
- ],
- "device": null,
- "use_cache": null,
- "limit": null,
- "bootstrap_iters": 100000,
- "gen_kwargs": null
- },
- "git_hash": "2c0a875"
- }
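
The xcopa configs differ from pawsx in two visible ways: doc_to_text is serialized as a functools.partial over a per-language connector (the 'cause'/'effect' words), and doc_to_choice calls a convert_choice helper whose definition is not captured in this dump. A hedged sketch of the plausible shape, assuming convert_choice lower-cases the leading character so each choice reads as a clause continuation:

    import functools

    def convert_choice(choice: str) -> str:
        # Assumed helper (its body is not shown in the diff).
        return choice[0].lower() + choice[1:]

    def doc_to_choice(doc):
        # Verbatim from the serialized doc_to_choice above.
        return [convert_choice(doc["choice1"]), convert_choice(doc["choice2"])]

    def doc_to_text(doc, connector):
        # Assumed prompt builder: premise minus its trailing period, plus the
        # cause/effect connector selected by the document's "question" field.
        return doc["premise"].strip()[:-1] + " " + connector[doc["question"]]

    # Matches the xcopa_it entry above.
    xcopa_it_text = functools.partial(doc_to_text, connector={"cause": "perché", "effect": "quindi"})

    # Hypothetical document for illustration only.
    doc = {"premise": "L'uomo aprì l'ombrello.", "question": "cause",
           "choice1": "Pioveva.", "choice2": "Faceva caldo."}
    print(xcopa_it_text(doc))  # L'uomo aprì l'ombrello perché
    print(doc_to_choice(doc))  # ['pioveva.', 'faceva caldo.']
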
lm-eval-output/RedPajama-INCITE-7B-Base/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f9a66a8543cd345001ca68fc23fa022eccb3bfb1e5ab51f497c9c70e925df6c3
- size 198557
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:84455d99a0f886ce1dc7c7fbfd5a73a3cb9467281241066bb2d1a572c792c3dc
- size 6010130
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,548 +0,0 @@
- {
- "results": {
- "xnli": {
- "acc,none": 0.3827309236947791,
- "acc_stderr,none": 0.05194928176239464,
- "alias": "xnli"
- },
- "xnli_ar": {
- "acc,none": 0.344578313253012,
- "acc_stderr,none": 0.00952559090011065,
- "alias": " - xnli_ar"
- },
- "xnli_bg": {
- "acc,none": 0.37028112449799194,
- "acc_stderr,none": 0.00967891540984029,
- "alias": " - xnli_bg"
- },
- "xnli_de": {
- "acc,none": 0.4461847389558233,
- "acc_stderr,none": 0.00996385427413916,
- "alias": " - xnli_de"
- },
- "xnli_el": {
- "acc,none": 0.3481927710843373,
- "acc_stderr,none": 0.009548980649153386,
- "alias": " - xnli_el"
- },
- "xnli_en": {
- "acc,none": 0.5357429718875502,
- "acc_stderr,none": 0.009996432468510355,
- "alias": " - xnli_en"
- },
- "xnli_es": {
- "acc,none": 0.41967871485943775,
- "acc_stderr,none": 0.009891912665432372,
- "alias": " - xnli_es"
- },
- "xnli_fr": {
- "acc,none": 0.46947791164658637,
- "acc_stderr,none": 0.010003382355314755,
- "alias": " - xnli_fr"
- },
- "xnli_hi": {
- "acc,none": 0.3405622489959839,
- "acc_stderr,none": 0.009498886690274447,
- "alias": " - xnli_hi"
- },
- "xnli_ru": {
- "acc,none": 0.4433734939759036,
- "acc_stderr,none": 0.009957592660538648,
- "alias": " - xnli_ru"
- },
- "xnli_sw": {
- "acc,none": 0.3369477911646586,
- "acc_stderr,none": 0.009474203778757722,
- "alias": " - xnli_sw"
- },
- "xnli_th": {
- "acc,none": 0.3405622489959839,
- "acc_stderr,none": 0.009498886690274442,
- "alias": " - xnli_th"
- },
- "xnli_tr": {
- "acc,none": 0.3437751004016064,
- "acc_stderr,none": 0.009520310502882934,
- "alias": " - xnli_tr"
- },
- "xnli_ur": {
- "acc,none": 0.3321285140562249,
- "acc_stderr,none": 0.009440328001240636,
- "alias": " - xnli_ur"
- },
- "xnli_vi": {
- "acc,none": 0.3309236947791165,
- "acc_stderr,none": 0.009431685461463288,
- "alias": " - xnli_vi"
- },
- "xnli_zh": {
- "acc,none": 0.3385542168674699,
- "acc_stderr,none": 0.009485250208516876,
- "alias": " - xnli_zh"
- }
- },
- "groups": {
- "xnli": {
- "acc,none": 0.3827309236947791,
- "acc_stderr,none": 0.05194928176239464,
- "alias": "xnli"
- }
- },
- "configs": {
- "xnli_ar": {
- "task": "xnli_ar",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "ar",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", صحيح? نعم, \"+hypothesis,premise+\", صحيح? لذا, \"+hypothesis,premise+\", صحيح? رقم, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_bg": {
- "task": "xnli_bg",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "bg",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", правилно? да, \"+hypothesis,premise+\", правилно? така, \"+hypothesis,premise+\", правилно? не, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_de": {
- "task": "xnli_de",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "de",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", richtig? Ja, \"+hypothesis,premise+\", richtig? Auch, \"+hypothesis,premise+\", richtig? Nein, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_el": {
- "task": "xnli_el",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "el",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", σωστός? Ναί, \"+hypothesis,premise+\", σωστός? Έτσι, \"+hypothesis,premise+\", σωστός? όχι, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_en": {
- "task": "xnli_en",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "en",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", right? Yes, \"+hypothesis,premise+\", right? Also, \"+hypothesis,premise+\", right? No, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_es": {
- "task": "xnli_es",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "es",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", correcto? Sí, \"+hypothesis,premise+\", correcto? Asi que, \"+hypothesis,premise+\", correcto? No, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_fr": {
- "task": "xnli_fr",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "fr",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", correct? Oui, \"+hypothesis,premise+\", correct? Aussi, \"+hypothesis,premise+\", correct? Non, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_hi": {
- "task": "xnli_hi",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "hi",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", सही? हाँ, \"+hypothesis,premise+\", सही? इसलिए, \"+hypothesis,premise+\", सही? नहीं, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_ru": {
- "task": "xnli_ru",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "ru",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", правильно? Да, \"+hypothesis,premise+\", правильно? Так, \"+hypothesis,premise+\", правильно? Нет, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_sw": {
- "task": "xnli_sw",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "sw",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", sahihi? Ndiyo, \"+hypothesis,premise+\", sahihi? Hivyo, \"+hypothesis,premise+\", sahihi? Hapana, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_th": {
- "task": "xnli_th",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "th",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", ถูกต้อง? ใช่, \"+hypothesis,premise+\", ถูกต้อง? ดังนั้น, \"+hypothesis,premise+\", ถูกต้อง? ไม่, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_tr": {
- "task": "xnli_tr",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "tr",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", doğru? Evet, \"+hypothesis,premise+\", doğru? Böylece, \"+hypothesis,premise+\", doğru? Hayır, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_ur": {
- "task": "xnli_ur",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "ur",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", صحیح? جی ہاں, \"+hypothesis,premise+\", صحیح? اس لئے, \"+hypothesis,premise+\", صحیح? نہیں, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_vi": {
- "task": "xnli_vi",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "vi",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", đúng? Vâng, \"+hypothesis,premise+\", đúng? Vì vậy, \"+hypothesis,premise+\", đúng? Không, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "xnli_zh": {
- "task": "xnli_zh",
- "group": "xnli",
- "dataset_path": "xnli",
- "dataset_name": "zh",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "",
- "doc_to_target": "label",
- "doc_to_choice": "{{[premise+\", 正确? 是的, \"+hypothesis,premise+\", 正确? 所以, \"+hypothesis,premise+\", 正确? 不是的, \"+hypothesis]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- }
- },
- "versions": {
- "xnli": "N/A",
- "xnli_ar": 1.0,
- "xnli_bg": 1.0,
- "xnli_de": 1.0,
- "xnli_el": 1.0,
- "xnli_en": 1.0,
- "xnli_es": 1.0,
- "xnli_fr": 1.0,
- "xnli_hi": 1.0,
- "xnli_ru": 1.0,
- "xnli_sw": 1.0,
- "xnli_th": 1.0,
- "xnli_tr": 1.0,
- "xnli_ur": 1.0,
- "xnli_vi": 1.0,
- "xnli_zh": 1.0
- },
- "n-shot": {
- "xnli": 0,
- "xnli_ar": 0,
- "xnli_bg": 0,
- "xnli_de": 0,
- "xnli_el": 0,
- "xnli_en": 0,
- "xnli_es": 0,
- "xnli_fr": 0,
- "xnli_hi": 0,
- "xnli_ru": 0,
- "xnli_sw": 0,
- "xnli_th": 0,
- "xnli_tr": 0,
- "xnli_ur": 0,
- "xnli_vi": 0,
- "xnli_zh": 0
- },
- "config": {
- "model": "hf",
- "model_args": "pretrained=togethercomputer/RedPajama-INCITE-7B-Base,dtype=bfloat16,trust_remote_code=True",
- "batch_size": "auto",
- "batch_sizes": [
- 8
- ],
- "device": null,
- "use_cache": null,
- "limit": null,
- "bootstrap_iters": 100000,
- "gen_kwargs": null
- },
- "git_hash": "2c0a875"
- }
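
Every results.json in this commit carries the same run-level "config" block, which maps directly onto a harness invocation: model "hf", the model_args string, and "batch_size": "auto" (the batch actually probed is recorded under "batch_sizes", 8 for this xnli run). A hedged sketch of reproducing such a run through lm-evaluation-harness's Python entry point; the simple_evaluate signature is assumed from the v0.4-era API and may differ across versions:

    import lm_eval

    # Mirrors the "config" block above; "batch_size": "auto" lets the harness
    # probe for the largest batch that fits on the GPU.
    results = lm_eval.simple_evaluate(
        model="hf",
        model_args="pretrained=togethercomputer/RedPajama-INCITE-7B-Base,"
                   "dtype=bfloat16,trust_remote_code=True",
        tasks=["xnli"],
        batch_size="auto",
    )
    print(results["results"]["xnli"]["acc,none"])
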
lm-eval-output/RedPajama-INCITE-7B-Base/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:cfe49c26553fc37efb6944cdf331e11215c8f870ff6c85a92c9f460560f3348b
- size 128783
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:56d33ec3d4d975e330cf2e7f94a5c0309d5c4a0cbbfcbb7405b27a7db626c574
- size 4064576
 
 
 
 
lm-eval-output/RedPajama-INCITE-7B-Base/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,423 +0,0 @@
- {
- "results": {
- "xstorycloze": {
- "acc,none": 0.5524336682510078,
- "acc_stderr,none": 0.06791399332607427,
- "alias": "xstorycloze"
- },
- "xstorycloze_ar": {
- "acc,none": 0.4798146922567836,
- "acc_stderr,none": 0.01285663570649829,
- "alias": " - xstorycloze_ar"
- },
- "xstorycloze_en": {
- "acc,none": 0.7485109199205824,
- "acc_stderr,none": 0.011165293988715807,
- "alias": " - xstorycloze_en"
- },
- "xstorycloze_es": {
- "acc,none": 0.6393117140966248,
- "acc_stderr,none": 0.012357592682139025,
- "alias": " - xstorycloze_es"
- },
- "xstorycloze_eu": {
- "acc,none": 0.514228987425546,
- "acc_stderr,none": 0.012861913999596127,
- "alias": " - xstorycloze_eu"
- },
- "xstorycloze_hi": {
- "acc,none": 0.513567174056916,
- "acc_stderr,none": 0.01286238758665008,
- "alias": " - xstorycloze_hi"
- },
- "xstorycloze_id": {
- "acc,none": 0.513567174056916,
- "acc_stderr,none": 0.01286238758665008,
- "alias": " - xstorycloze_id"
- },
- "xstorycloze_my": {
- "acc,none": 0.48974189278623426,
- "acc_stderr,none": 0.012864417047980477,
- "alias": " - xstorycloze_my"
- },
- "xstorycloze_ru": {
- "acc,none": 0.5823957643944407,
- "acc_stderr,none": 0.012691211382848643,
- "alias": " - xstorycloze_ru"
- },
- "xstorycloze_sw": {
- "acc,none": 0.514228987425546,
- "acc_stderr,none": 0.012861913999596127,
- "alias": " - xstorycloze_sw"
- },
- "xstorycloze_te": {
- "acc,none": 0.5327597617471873,
- "acc_stderr,none": 0.012839477563855927,
- "alias": " - xstorycloze_te"
- },
- "xstorycloze_zh": {
- "acc,none": 0.5486432825943084,
- "acc_stderr,none": 0.0128060889661224,
- "alias": " - xstorycloze_zh"
- }
- },
- "groups": {
- "xstorycloze": {
- "acc,none": 0.5524336682510078,
- "acc_stderr,none": 0.06791399332607427,
- "alias": "xstorycloze"
- }
- },
- "configs": {
- "xstorycloze_ar": {
- "task": "xstorycloze_ar",
- "group": "xstorycloze",
- "dataset_path": "juletxara/xstory_cloze",
- "dataset_name": "ar",
- "training_split": "train",
- "validation_split": "eval",
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "doc_to_target": "{{answer_right_ending-1}}",
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "metadata": {
- "version": 1.0
- }
- },
- "xstorycloze_en": {
- "task": "xstorycloze_en",
- "group": "xstorycloze",
- "dataset_path": "juletxara/xstory_cloze",
- "dataset_name": "en",
- "training_split": "train",
- "validation_split": "eval",
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "doc_to_target": "{{answer_right_ending-1}}",
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "metadata": {
- "version": 1.0
- }
- },
- "xstorycloze_es": {
- "task": "xstorycloze_es",
- "group": "xstorycloze",
- "dataset_path": "juletxara/xstory_cloze",
- "dataset_name": "es",
- "training_split": "train",
- "validation_split": "eval",
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "doc_to_target": "{{answer_right_ending-1}}",
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "metadata": {
- "version": 1.0
- }
- },
- "xstorycloze_eu": {
- "task": "xstorycloze_eu",
- "group": "xstorycloze",
- "dataset_path": "juletxara/xstory_cloze",
- "dataset_name": "eu",
- "training_split": "train",
- "validation_split": "eval",
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "doc_to_target": "{{answer_right_ending-1}}",
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
- "metadata": {
- "version": 1.0
- }
- },
- "xstorycloze_hi": {
- "task": "xstorycloze_hi",
- "group": "xstorycloze",
187
- "dataset_path": "juletxara/xstory_cloze",
188
- "dataset_name": "hi",
189
- "training_split": "train",
190
- "validation_split": "eval",
191
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
192
- "doc_to_target": "{{answer_right_ending-1}}",
193
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
194
- "description": "",
195
- "target_delimiter": " ",
196
- "fewshot_delimiter": "\n\n",
197
- "metric_list": [
198
- {
199
- "metric": "acc",
200
- "aggregation": "mean",
201
- "higher_is_better": true
202
- }
203
- ],
204
- "output_type": "multiple_choice",
205
- "repeats": 1,
206
- "should_decontaminate": true,
207
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
208
- "metadata": {
209
- "version": 1.0
210
- }
211
- },
212
- "xstorycloze_id": {
213
- "task": "xstorycloze_id",
214
- "group": "xstorycloze",
215
- "dataset_path": "juletxara/xstory_cloze",
216
- "dataset_name": "id",
217
- "training_split": "train",
218
- "validation_split": "eval",
219
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
220
- "doc_to_target": "{{answer_right_ending-1}}",
221
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
222
- "description": "",
223
- "target_delimiter": " ",
224
- "fewshot_delimiter": "\n\n",
225
- "metric_list": [
226
- {
227
- "metric": "acc",
228
- "aggregation": "mean",
229
- "higher_is_better": true
230
- }
231
- ],
232
- "output_type": "multiple_choice",
233
- "repeats": 1,
234
- "should_decontaminate": true,
235
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
236
- "metadata": {
237
- "version": 1.0
238
- }
239
- },
240
- "xstorycloze_my": {
241
- "task": "xstorycloze_my",
242
- "group": "xstorycloze",
243
- "dataset_path": "juletxara/xstory_cloze",
244
- "dataset_name": "my",
245
- "training_split": "train",
246
- "validation_split": "eval",
247
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
248
- "doc_to_target": "{{answer_right_ending-1}}",
249
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
250
- "description": "",
251
- "target_delimiter": " ",
252
- "fewshot_delimiter": "\n\n",
253
- "metric_list": [
254
- {
255
- "metric": "acc",
256
- "aggregation": "mean",
257
- "higher_is_better": true
258
- }
259
- ],
260
- "output_type": "multiple_choice",
261
- "repeats": 1,
262
- "should_decontaminate": true,
263
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
264
- "metadata": {
265
- "version": 1.0
266
- }
267
- },
268
- "xstorycloze_ru": {
269
- "task": "xstorycloze_ru",
270
- "group": "xstorycloze",
271
- "dataset_path": "juletxara/xstory_cloze",
272
- "dataset_name": "ru",
273
- "training_split": "train",
274
- "validation_split": "eval",
275
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
276
- "doc_to_target": "{{answer_right_ending-1}}",
277
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
278
- "description": "",
279
- "target_delimiter": " ",
280
- "fewshot_delimiter": "\n\n",
281
- "metric_list": [
282
- {
283
- "metric": "acc",
284
- "aggregation": "mean",
285
- "higher_is_better": true
286
- }
287
- ],
288
- "output_type": "multiple_choice",
289
- "repeats": 1,
290
- "should_decontaminate": true,
291
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
292
- "metadata": {
293
- "version": 1.0
294
- }
295
- },
296
- "xstorycloze_sw": {
297
- "task": "xstorycloze_sw",
298
- "group": "xstorycloze",
299
- "dataset_path": "juletxara/xstory_cloze",
300
- "dataset_name": "sw",
301
- "training_split": "train",
302
- "validation_split": "eval",
303
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
304
- "doc_to_target": "{{answer_right_ending-1}}",
305
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
306
- "description": "",
307
- "target_delimiter": " ",
308
- "fewshot_delimiter": "\n\n",
309
- "metric_list": [
310
- {
311
- "metric": "acc",
312
- "aggregation": "mean",
313
- "higher_is_better": true
314
- }
315
- ],
316
- "output_type": "multiple_choice",
317
- "repeats": 1,
318
- "should_decontaminate": true,
319
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
320
- "metadata": {
321
- "version": 1.0
322
- }
323
- },
324
- "xstorycloze_te": {
325
- "task": "xstorycloze_te",
326
- "group": "xstorycloze",
327
- "dataset_path": "juletxara/xstory_cloze",
328
- "dataset_name": "te",
329
- "training_split": "train",
330
- "validation_split": "eval",
331
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
332
- "doc_to_target": "{{answer_right_ending-1}}",
333
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
334
- "description": "",
335
- "target_delimiter": " ",
336
- "fewshot_delimiter": "\n\n",
337
- "metric_list": [
338
- {
339
- "metric": "acc",
340
- "aggregation": "mean",
341
- "higher_is_better": true
342
- }
343
- ],
344
- "output_type": "multiple_choice",
345
- "repeats": 1,
346
- "should_decontaminate": true,
347
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
348
- "metadata": {
349
- "version": 1.0
350
- }
351
- },
352
- "xstorycloze_zh": {
353
- "task": "xstorycloze_zh",
354
- "group": "xstorycloze",
355
- "dataset_path": "juletxara/xstory_cloze",
356
- "dataset_name": "zh",
357
- "training_split": "train",
358
- "validation_split": "eval",
359
- "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
360
- "doc_to_target": "{{answer_right_ending-1}}",
361
- "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
362
- "description": "",
363
- "target_delimiter": " ",
364
- "fewshot_delimiter": "\n\n",
365
- "metric_list": [
366
- {
367
- "metric": "acc",
368
- "aggregation": "mean",
369
- "higher_is_better": true
370
- }
371
- ],
372
- "output_type": "multiple_choice",
373
- "repeats": 1,
374
- "should_decontaminate": true,
375
- "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
376
- "metadata": {
377
- "version": 1.0
378
- }
379
- }
380
- },
381
- "versions": {
382
- "xstorycloze": "N/A",
383
- "xstorycloze_ar": 1.0,
384
- "xstorycloze_en": 1.0,
385
- "xstorycloze_es": 1.0,
386
- "xstorycloze_eu": 1.0,
387
- "xstorycloze_hi": 1.0,
388
- "xstorycloze_id": 1.0,
389
- "xstorycloze_my": 1.0,
390
- "xstorycloze_ru": 1.0,
391
- "xstorycloze_sw": 1.0,
392
- "xstorycloze_te": 1.0,
393
- "xstorycloze_zh": 1.0
394
- },
395
- "n-shot": {
396
- "xstorycloze": 0,
397
- "xstorycloze_ar": 0,
398
- "xstorycloze_en": 0,
399
- "xstorycloze_es": 0,
400
- "xstorycloze_eu": 0,
401
- "xstorycloze_hi": 0,
402
- "xstorycloze_id": 0,
403
- "xstorycloze_my": 0,
404
- "xstorycloze_ru": 0,
405
- "xstorycloze_sw": 0,
406
- "xstorycloze_te": 0,
407
- "xstorycloze_zh": 0
408
- },
409
- "config": {
410
- "model": "hf",
411
- "model_args": "pretrained=togethercomputer/RedPajama-INCITE-7B-Base,dtype=bfloat16,trust_remote_code=True",
412
- "batch_size": "auto",
413
- "batch_sizes": [
414
- 4
415
- ],
416
- "device": null,
417
- "use_cache": null,
418
- "limit": null,
419
- "bootstrap_iters": 100000,
420
- "gen_kwargs": null
421
- },
422
- "git_hash": "2c0a875"
423
- }
 
lm-eval-output/RedPajama-INCITE-7B-Base/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:6e9f5ba42ffa2ea898c14805d192629b5e27ca22869b18fe497db1c273ad30d8
- size 57015
 
lm-eval-output/RedPajama-INCITE-7B-Base/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e7db16581994b54191b3fb5565ec9226fdb81757903fac634d148eb37ebcd4a0
- size 513604
 
lm-eval-output/RedPajama-INCITE-7B-Base/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,248 +0,0 @@
1
- {
2
- "results": {
3
- "xwinograd": {
4
- "acc,none": 0.74331310406833,
5
- "acc_stderr,none": 0.08408219267029617,
6
- "alias": "xwinograd"
7
- },
8
- "xwinograd_en": {
9
- "acc,none": 0.8718279569892473,
10
- "acc_stderr,none": 0.006934162057729827,
11
- "alias": " - xwinograd_en"
12
- },
13
- "xwinograd_fr": {
14
- "acc,none": 0.6385542168674698,
15
- "acc_stderr,none": 0.053053439348320096,
16
- "alias": " - xwinograd_fr"
17
- },
18
- "xwinograd_jp": {
19
- "acc,none": 0.5620437956204379,
20
- "acc_stderr,none": 0.016029414748731596,
21
- "alias": " - xwinograd_jp"
22
- },
23
- "xwinograd_pt": {
24
- "acc,none": 0.6653992395437263,
25
- "acc_stderr,none": 0.02915103415331038,
26
- "alias": " - xwinograd_pt"
27
- },
28
- "xwinograd_ru": {
29
- "acc,none": 0.6222222222222222,
30
- "acc_stderr,none": 0.0273606328610564,
31
- "alias": " - xwinograd_ru"
32
- },
33
- "xwinograd_zh": {
34
- "acc,none": 0.628968253968254,
35
- "acc_stderr,none": 0.02153951426767635,
36
- "alias": " - xwinograd_zh"
37
- }
38
- },
39
- "groups": {
40
- "xwinograd": {
41
- "acc,none": 0.74331310406833,
42
- "acc_stderr,none": 0.08408219267029617,
43
- "alias": "xwinograd"
44
- }
45
- },
46
- "configs": {
47
- "xwinograd_en": {
48
- "task": "xwinograd_en",
49
- "group": [
50
- "xwinograd"
51
- ],
52
- "dataset_path": "Muennighoff/xwinograd",
53
- "dataset_name": "en",
54
- "test_split": "test",
55
- "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
56
- "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
57
- "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
58
- "description": "",
59
- "target_delimiter": " ",
60
- "fewshot_delimiter": "\n\n",
61
- "metric_list": [
62
- {
63
- "metric": "acc",
64
- "aggregation": "mean",
65
- "higher_is_better": true
66
- }
67
- ],
68
- "output_type": "multiple_choice",
69
- "repeats": 1,
70
- "should_decontaminate": false,
71
- "metadata": {
72
- "version": 1.0
73
- }
74
- },
75
- "xwinograd_fr": {
76
- "task": "xwinograd_fr",
77
- "group": [
78
- "xwinograd"
79
- ],
80
- "dataset_path": "Muennighoff/xwinograd",
81
- "dataset_name": "fr",
82
- "test_split": "test",
83
- "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
84
- "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
85
- "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
86
- "description": "",
87
- "target_delimiter": " ",
88
- "fewshot_delimiter": "\n\n",
89
- "metric_list": [
90
- {
91
- "metric": "acc",
92
- "aggregation": "mean",
93
- "higher_is_better": true
94
- }
95
- ],
96
- "output_type": "multiple_choice",
97
- "repeats": 1,
98
- "should_decontaminate": false,
99
- "metadata": {
100
- "version": 1.0
101
- }
102
- },
103
- "xwinograd_jp": {
104
- "task": "xwinograd_jp",
105
- "group": [
106
- "xwinograd"
107
- ],
108
- "dataset_path": "Muennighoff/xwinograd",
109
- "dataset_name": "jp",
110
- "test_split": "test",
111
- "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
112
- "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
113
- "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
114
- "description": "",
115
- "target_delimiter": " ",
116
- "fewshot_delimiter": "\n\n",
117
- "metric_list": [
118
- {
119
- "metric": "acc",
120
- "aggregation": "mean",
121
- "higher_is_better": true
122
- }
123
- ],
124
- "output_type": "multiple_choice",
125
- "repeats": 1,
126
- "should_decontaminate": false,
127
- "metadata": {
128
- "version": 1.0
129
- }
130
- },
131
- "xwinograd_pt": {
132
- "task": "xwinograd_pt",
133
- "group": [
134
- "xwinograd"
135
- ],
136
- "dataset_path": "Muennighoff/xwinograd",
137
- "dataset_name": "pt",
138
- "test_split": "test",
139
- "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
140
- "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
141
- "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
142
- "description": "",
143
- "target_delimiter": " ",
144
- "fewshot_delimiter": "\n\n",
145
- "metric_list": [
146
- {
147
- "metric": "acc",
148
- "aggregation": "mean",
149
- "higher_is_better": true
150
- }
151
- ],
152
- "output_type": "multiple_choice",
153
- "repeats": 1,
154
- "should_decontaminate": false,
155
- "metadata": {
156
- "version": 1.0
157
- }
158
- },
159
- "xwinograd_ru": {
160
- "task": "xwinograd_ru",
161
- "group": [
162
- "xwinograd"
163
- ],
164
- "dataset_path": "Muennighoff/xwinograd",
165
- "dataset_name": "ru",
166
- "test_split": "test",
167
- "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
168
- "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
169
- "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
170
- "description": "",
171
- "target_delimiter": " ",
172
- "fewshot_delimiter": "\n\n",
173
- "metric_list": [
174
- {
175
- "metric": "acc",
176
- "aggregation": "mean",
177
- "higher_is_better": true
178
- }
179
- ],
180
- "output_type": "multiple_choice",
181
- "repeats": 1,
182
- "should_decontaminate": false,
183
- "metadata": {
184
- "version": 1.0
185
- }
186
- },
187
- "xwinograd_zh": {
188
- "task": "xwinograd_zh",
189
- "group": [
190
- "xwinograd"
191
- ],
192
- "dataset_path": "Muennighoff/xwinograd",
193
- "dataset_name": "zh",
194
- "test_split": "test",
195
- "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
196
- "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
197
- "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
198
- "description": "",
199
- "target_delimiter": " ",
200
- "fewshot_delimiter": "\n\n",
201
- "metric_list": [
202
- {
203
- "metric": "acc",
204
- "aggregation": "mean",
205
- "higher_is_better": true
206
- }
207
- ],
208
- "output_type": "multiple_choice",
209
- "repeats": 1,
210
- "should_decontaminate": false,
211
- "metadata": {
212
- "version": 1.0
213
- }
214
- }
215
- },
216
- "versions": {
217
- "xwinograd": "N/A",
218
- "xwinograd_en": 1.0,
219
- "xwinograd_fr": 1.0,
220
- "xwinograd_jp": 1.0,
221
- "xwinograd_pt": 1.0,
222
- "xwinograd_ru": 1.0,
223
- "xwinograd_zh": 1.0
224
- },
225
- "n-shot": {
226
- "xwinograd": 0,
227
- "xwinograd_en": 0,
228
- "xwinograd_fr": 0,
229
- "xwinograd_jp": 0,
230
- "xwinograd_pt": 0,
231
- "xwinograd_ru": 0,
232
- "xwinograd_zh": 0
233
- },
234
- "config": {
235
- "model": "hf",
236
- "model_args": "pretrained=togethercomputer/RedPajama-INCITE-7B-Base,dtype=bfloat16,trust_remote_code=True",
237
- "batch_size": "auto",
238
- "batch_sizes": [
239
- 16
240
- ],
241
- "device": null,
242
- "use_cache": null,
243
- "limit": null,
244
- "bootstrap_iters": 100000,
245
- "gen_kwargs": null
246
- },
247
- "git_hash": "2c0a875"
248
- }
 
lm-eval-output/RedPajama-INCITE-7B-Base/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:6e89625fbf308af1fff3236b77553d7bffe3d2d7c54cab2b132c43f2ff9e0992
- size 40851
 
lm-eval-output/allenai/OLMo-7B/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a7b6162eaecf35e40162218a7076222880e68797aaeb567a8713ff06ca3b6935
- size 681726
 
lm-eval-output/allenai/OLMo-7B/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,33 +1,33 @@
1
  {
2
  "results": {
3
  "ai2_arc": {
4
- "acc,none": 0.6124577226606539,
5
- "acc_stderr,none": 0.05783349554301207,
6
- "acc_norm,none": 0.5913190529875987,
7
- "acc_norm_stderr,none": 0.04520523788851735,
8
  "alias": "ai2_arc"
9
  },
10
  "arc_challenge": {
11
- "acc,none": 0.3677474402730375,
12
- "acc_stderr,none": 0.014090995618168475,
13
- "acc_norm,none": 0.40273037542662116,
14
- "acc_norm_stderr,none": 0.01433223630679014,
15
  "alias": " - arc_challenge"
16
  },
17
  "arc_easy": {
18
- "acc,none": 0.7331649831649831,
19
- "acc_stderr,none": 0.009075915859267257,
20
- "acc_norm,none": 0.6843434343434344,
21
- "acc_norm_stderr,none": 0.009537019245566087,
22
  "alias": " - arc_easy"
23
  }
24
  },
25
  "groups": {
26
  "ai2_arc": {
27
- "acc,none": 0.6124577226606539,
28
- "acc_stderr,none": 0.05783349554301207,
29
- "acc_norm,none": 0.5913190529875987,
30
- "acc_norm_stderr,none": 0.04520523788851735,
31
  "alias": "ai2_arc"
32
  }
33
  },
@@ -118,13 +118,15 @@
118
  "config": {
119
  "model": "hf",
120
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
121
- "batch_size": "2",
122
- "batch_sizes": [],
 
 
123
  "device": null,
124
  "use_cache": null,
125
  "limit": null,
126
  "bootstrap_iters": 100000,
127
  "gen_kwargs": null
128
  },
129
- "git_hash": "4701655"
130
  }
 
1
  {
2
  "results": {
3
  "ai2_arc": {
4
+ "acc,none": 0.6141488162344984,
5
+ "acc_stderr,none": 0.11602553865195812,
6
+ "acc_norm,none": 0.5944193912063134,
7
+ "acc_norm_stderr,none": 0.09008281087077372,
8
  "alias": "ai2_arc"
9
  },
10
  "arc_challenge": {
11
+ "acc,none": 0.36860068259385664,
12
+ "acc_stderr,none": 0.014097810678042187,
13
+ "acc_norm,none": 0.4044368600682594,
14
+ "acc_norm_stderr,none": 0.014342036483436174,
15
  "alias": " - arc_challenge"
16
  },
17
  "arc_easy": {
18
+ "acc,none": 0.7352693602693603,
19
+ "acc_stderr,none": 0.009053021086173977,
20
+ "acc_norm,none": 0.6881313131313131,
21
+ "acc_norm_stderr,none": 0.00950582334581765,
22
  "alias": " - arc_easy"
23
  }
24
  },
25
  "groups": {
26
  "ai2_arc": {
27
+ "acc,none": 0.6141488162344984,
28
+ "acc_stderr,none": 0.11602553865195812,
29
+ "acc_norm,none": 0.5944193912063134,
30
+ "acc_norm_stderr,none": 0.09008281087077372,
31
  "alias": "ai2_arc"
32
  }
33
  },
 
118
  "config": {
119
  "model": "hf",
120
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
121
+ "batch_size": "auto",
122
+ "batch_sizes": [
123
+ 32
124
+ ],
125
  "device": null,
126
  "use_cache": null,
127
  "limit": null,
128
  "bootstrap_iters": 100000,
129
  "gen_kwargs": null
130
  },
131
+ "git_hash": "2e3ceb0"
132
  }
lm-eval-output/allenai/OLMo-7B/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ceff5a207e0d82cc58530f46cc2f8bbda2dfdf1798d357df795ca30a140fe123
- size 22372
+ oid sha256:feda1a9a8d31385965b7cd26c524ddba7445b789e2a118919a275c549d0cea1e
+ size 16263
lm-eval-output/allenai/OLMo-7B/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:30daa66193be9558d1965ef1f9bb9c02d4a91213fb3a14fac0879bcf930e1e4b
- size 1070250
 
lm-eval-output/allenai/OLMo-7B/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,30 +1,30 @@
1
  {
2
  "results": {
3
  "anli": {
4
- "acc,none": 0.3471875,
5
- "acc_stderr,none": 0.01681366289388001,
6
  "alias": "anli"
7
  },
8
  "anli_r1": {
9
- "acc,none": 0.327,
10
- "acc_stderr,none": 0.014842213153411237,
11
  "alias": " - anli_r1"
12
  },
13
  "anli_r2": {
14
- "acc,none": 0.36,
15
- "acc_stderr,none": 0.015186527932040122,
16
  "alias": " - anli_r2"
17
  },
18
  "anli_r3": {
19
- "acc,none": 0.35333333333333333,
20
- "acc_stderr,none": 0.01380457216231493,
21
  "alias": " - anli_r3"
22
  }
23
  },
24
  "groups": {
25
  "anli": {
26
- "acc,none": 0.3471875,
27
- "acc_stderr,none": 0.01681366289388001,
28
  "alias": "anli"
29
  }
30
  },
@@ -147,13 +147,15 @@
147
  "config": {
148
  "model": "hf",
149
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
150
- "batch_size": "2",
151
- "batch_sizes": [],
 
 
152
  "device": null,
153
  "use_cache": null,
154
  "limit": null,
155
  "bootstrap_iters": 100000,
156
  "gen_kwargs": null
157
  },
158
- "git_hash": "4701655"
159
  }
 
1
  {
2
  "results": {
3
  "anli": {
4
+ "acc,none": 0.3465625,
5
+ "acc_stderr,none": 0.016792339011968412,
6
  "alias": "anli"
7
  },
8
  "anli_r1": {
9
+ "acc,none": 0.325,
10
+ "acc_stderr,none": 0.014818724459095526,
11
  "alias": " - anli_r1"
12
  },
13
  "anli_r2": {
14
+ "acc,none": 0.356,
15
+ "acc_stderr,none": 0.015149042659306626,
16
  "alias": " - anli_r2"
17
  },
18
  "anli_r3": {
19
+ "acc,none": 0.3566666666666667,
20
+ "acc_stderr,none": 0.013833742805050717,
21
  "alias": " - anli_r3"
22
  }
23
  },
24
  "groups": {
25
  "anli": {
26
+ "acc,none": 0.3465625,
27
+ "acc_stderr,none": 0.016792339011968412,
28
  "alias": "anli"
29
  }
30
  },
 
147
  "config": {
148
  "model": "hf",
149
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
150
+ "batch_size": "auto",
151
+ "batch_sizes": [
152
+ 32
153
+ ],
154
  "device": null,
155
  "use_cache": null,
156
  "limit": null,
157
  "bootstrap_iters": 100000,
158
  "gen_kwargs": null
159
  },
160
+ "git_hash": "2e3ceb0"
161
  }
lm-eval-output/allenai/OLMo-7B/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4e49a85e9b5317ffd4811563ed8a48d3e7219d613f61b93e7c22ca6cd249df70
- size 21092
+ oid sha256:df4971e214a122144a0bdc530721b0e62f5d9807f2626826442b773b85ceb849
+ size 14676
lm-eval-output/allenai/OLMo-7B/arithmetic/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:54f3c8990eb96dc7ac5c4e5586ce6ed5d89feec36cad016d2e19582657216f69
- size 571330
 
lm-eval-output/allenai/OLMo-7B/arithmetic/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "results": {
3
  "arithmetic": {
4
- "acc,none": 0.0069,
5
- "acc_stderr,none": 0.006602463150804194,
6
  "alias": "arithmetic"
7
  },
8
  "arithmetic_1dc": {
9
- "acc,none": 0.005,
10
- "acc_stderr,none": 0.0015775754727385474,
11
  "alias": " - arithmetic_1dc"
12
  },
13
  "arithmetic_2da": {
@@ -16,8 +16,8 @@
16
  "alias": " - arithmetic_2da"
17
  },
18
  "arithmetic_2dm": {
19
- "acc,none": 0.0295,
20
- "acc_stderr,none": 0.0037844465933618916,
21
  "alias": " - arithmetic_2dm"
22
  },
23
  "arithmetic_2ds": {
@@ -58,8 +58,8 @@
58
  },
59
  "groups": {
60
  "arithmetic": {
61
- "acc,none": 0.0069,
62
- "acc_stderr,none": 0.006602463150804194,
63
  "alias": "arithmetic"
64
  }
65
  },
@@ -364,13 +364,15 @@
364
  "config": {
365
  "model": "hf",
366
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
367
- "batch_size": "2",
368
- "batch_sizes": [],
 
 
369
  "device": null,
370
  "use_cache": null,
371
  "limit": null,
372
  "bootstrap_iters": 100000,
373
  "gen_kwargs": null
374
  },
375
- "git_hash": "4701655"
376
  }
 
1
  {
2
  "results": {
3
  "arithmetic": {
4
+ "acc,none": 0.007,
5
+ "acc_stderr,none": 0.006724488098523242,
6
  "alias": "arithmetic"
7
  },
8
  "arithmetic_1dc": {
9
+ "acc,none": 0.0065,
10
+ "acc_stderr,none": 0.0017973564602277768,
11
  "alias": " - arithmetic_1dc"
12
  },
13
  "arithmetic_2da": {
 
16
  "alias": " - arithmetic_2da"
17
  },
18
  "arithmetic_2dm": {
19
+ "acc,none": 0.029,
20
+ "acc_stderr,none": 0.0037532044004605246,
21
  "alias": " - arithmetic_2dm"
22
  },
23
  "arithmetic_2ds": {
 
58
  },
59
  "groups": {
60
  "arithmetic": {
61
+ "acc,none": 0.007,
62
+ "acc_stderr,none": 0.006724488098523242,
63
  "alias": "arithmetic"
64
  }
65
  },
 
364
  "config": {
365
  "model": "hf",
366
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
367
+ "batch_size": "auto",
368
+ "batch_sizes": [
369
+ 64
370
+ ],
371
  "device": null,
372
  "use_cache": null,
373
  "limit": null,
374
  "bootstrap_iters": 100000,
375
  "gen_kwargs": null
376
  },
377
+ "git_hash": "2e3ceb0"
378
  }
lm-eval-output/allenai/OLMo-7B/arithmetic/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bc302e5d00e6873c97ee6f5549a0c1cb9f76a06956b6cb32d874a17c6b1e548b
- size 32726
+ oid sha256:336d5042f526e0b5a3e6045ddb2334d66ffc6ba7c4f85d38364256c924609891
+ size 25619
lm-eval-output/allenai/OLMo-7B/arithmetic__/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4e36726446be44a4ca03fdd234a83b2cf7fb9884d27cb2ab737c41c9d9dae9f5
- size 571331
 
lm-eval-output/allenai/OLMo-7B/arithmetic__/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -36,8 +36,8 @@
36
  "alias": "arithmetic_2ds"
37
  },
38
  "arithmetic_2dm": {
39
- "acc,none": 0.0295,
40
- "acc_stderr,none": 0.0037844465933618916,
41
  "alias": "arithmetic_2dm"
42
  },
43
  "arithmetic_2da": {
@@ -46,8 +46,8 @@
46
  "alias": "arithmetic_2da"
47
  },
48
  "arithmetic_1dc": {
49
- "acc,none": 0.005,
50
- "acc_stderr,none": 0.0015775754727385474,
51
  "alias": "arithmetic_1dc"
52
  }
53
  },
@@ -350,13 +350,15 @@
350
  "config": {
351
  "model": "hf",
352
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
353
- "batch_size": "2",
354
- "batch_sizes": [],
 
 
355
  "device": null,
356
  "use_cache": null,
357
  "limit": null,
358
  "bootstrap_iters": 100000,
359
  "gen_kwargs": null
360
  },
361
- "git_hash": "4701655"
362
  }
 
36
  "alias": "arithmetic_2ds"
37
  },
38
  "arithmetic_2dm": {
39
+ "acc,none": 0.029,
40
+ "acc_stderr,none": 0.0037532044004605246,
41
  "alias": "arithmetic_2dm"
42
  },
43
  "arithmetic_2da": {
 
46
  "alias": "arithmetic_2da"
47
  },
48
  "arithmetic_1dc": {
49
+ "acc,none": 0.0065,
50
+ "acc_stderr,none": 0.0017973564602277768,
51
  "alias": "arithmetic_1dc"
52
  }
53
  },
 
350
  "config": {
351
  "model": "hf",
352
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
353
+ "batch_size": "auto",
354
+ "batch_sizes": [
355
+ 64
356
+ ],
357
  "device": null,
358
  "use_cache": null,
359
  "limit": null,
360
  "bootstrap_iters": 100000,
361
  "gen_kwargs": null
362
  },
363
+ "git_hash": "2e3ceb0"
364
  }
lm-eval-output/allenai/OLMo-7B/arithmetic__/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bf64ee5e732687e94dbcef3284174f1ce5468651d74df1c89e59d52fbbc17d0e
- size 33636
+ oid sha256:c280fce1e1826f134c773e5a830152245f71f8505f03d4c5cfcb4af0ff0f9df8
+ size 21272
lm-eval-output/allenai/OLMo-7B/asdiv/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:8d9004ca282c30e0451d79151305ebe50933d491576bde59f97ec09f2d20999a
- size 265321
 
lm-eval-output/allenai/OLMo-7B/asdiv/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,8 +1,8 @@
1
  {
2
  "results": {
3
  "asdiv": {
4
- "acc,none": 0.016919739696312365,
5
- "acc_stderr,none": 0.002686891250897643,
6
  "alias": "asdiv"
7
  }
8
  },
@@ -41,13 +41,15 @@
41
  "config": {
42
  "model": "hf",
43
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
44
- "batch_size": "2",
45
- "batch_sizes": [],
 
 
46
  "device": null,
47
  "use_cache": null,
48
  "limit": null,
49
  "bootstrap_iters": 100000,
50
  "gen_kwargs": null
51
  },
52
- "git_hash": "4701655"
53
  }
 
1
  {
2
  "results": {
3
  "asdiv": {
4
+ "acc,none": 0.015618221258134491,
5
+ "acc_stderr,none": 0.002583189883690767,
6
  "alias": "asdiv"
7
  }
8
  },
 
41
  "config": {
42
  "model": "hf",
43
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
44
+ "batch_size": "auto",
45
+ "batch_sizes": [
46
+ 64
47
+ ],
48
  "device": null,
49
  "use_cache": null,
50
  "limit": null,
51
  "bootstrap_iters": 100000,
52
  "gen_kwargs": null
53
  },
54
+ "git_hash": "2e3ceb0"
55
  }
lm-eval-output/allenai/OLMo-7B/asdiv/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5bee59ce6f91fa784a9ec22efa5da1a1a0a6720cb9c2a9459dd0ddeda079b8b5
- size 15826
+ oid sha256:bc981690a7e19989d96bb07b4c91f750b1d9921eca7d50386cdc5233a531770c
+ size 16390
lm-eval-output/allenai/OLMo-7B/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:bd87668d838a678fe98acd570f4f888ebadc3a001ba22cf884b87023aceebbc5
- size 4244097
 
lm-eval-output/allenai/OLMo-7B/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "results": {
3
  "blimp": {
4
- "acc,none": 0.8318358208955224,
5
- "acc_stderr,none": 0.15243408065668698,
6
  "alias": "blimp"
7
  },
8
  "blimp_adjunct_island": {
9
- "acc,none": 0.902,
10
- "acc_stderr,none": 0.009406619184621223,
11
  "alias": " - blimp_adjunct_island"
12
  },
13
  "blimp_anaphor_gender_agreement": {
@@ -16,23 +16,23 @@
16
  "alias": " - blimp_anaphor_gender_agreement"
17
  },
18
  "blimp_anaphor_number_agreement": {
19
- "acc,none": 0.992,
20
- "acc_stderr,none": 0.0028185003005045057,
21
  "alias": " - blimp_anaphor_number_agreement"
22
  },
23
  "blimp_animate_subject_passive": {
24
- "acc,none": 0.809,
25
- "acc_stderr,none": 0.012436787112179491,
26
  "alias": " - blimp_animate_subject_passive"
27
  },
28
  "blimp_animate_subject_trans": {
29
- "acc,none": 0.911,
30
- "acc_stderr,none": 0.009008893392651514,
31
  "alias": " - blimp_animate_subject_trans"
32
  },
33
  "blimp_causative": {
34
- "acc,none": 0.743,
35
- "acc_stderr,none": 0.013825416526895047,
36
  "alias": " - blimp_causative"
37
  },
38
  "blimp_complex_NP_island": {
@@ -41,113 +41,113 @@
41
  "alias": " - blimp_complex_NP_island"
42
  },
43
  "blimp_coordinate_structure_constraint_complex_left_branch": {
44
- "acc,none": 0.821,
45
- "acc_stderr,none": 0.012128730605719113,
46
  "alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
47
  },
48
  "blimp_coordinate_structure_constraint_object_extraction": {
49
- "acc,none": 0.889,
50
- "acc_stderr,none": 0.009938701010583726,
51
  "alias": " - blimp_coordinate_structure_constraint_object_extraction"
52
  },
53
  "blimp_determiner_noun_agreement_1": {
54
  "acc,none": 0.986,
55
- "acc_stderr,none": 0.0037172325482565916,
56
  "alias": " - blimp_determiner_noun_agreement_1"
57
  },
58
  "blimp_determiner_noun_agreement_2": {
59
- "acc,none": 0.975,
60
- "acc_stderr,none": 0.0049395748196984545,
61
  "alias": " - blimp_determiner_noun_agreement_2"
62
  },
63
  "blimp_determiner_noun_agreement_irregular_1": {
64
- "acc,none": 0.943,
65
- "acc_stderr,none": 0.0073351758537068355,
66
  "alias": " - blimp_determiner_noun_agreement_irregular_1"
67
  },
68
  "blimp_determiner_noun_agreement_irregular_2": {
69
- "acc,none": 0.951,
70
- "acc_stderr,none": 0.006829761756140914,
71
  "alias": " - blimp_determiner_noun_agreement_irregular_2"
72
  },
73
  "blimp_determiner_noun_agreement_with_adj_2": {
74
- "acc,none": 0.948,
75
- "acc_stderr,none": 0.007024624213817146,
76
  "alias": " - blimp_determiner_noun_agreement_with_adj_2"
77
  },
78
  "blimp_determiner_noun_agreement_with_adj_irregular_1": {
79
- "acc,none": 0.882,
80
- "acc_stderr,none": 0.01020686926438179,
81
  "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
82
  },
83
  "blimp_determiner_noun_agreement_with_adj_irregular_2": {
84
- "acc,none": 0.929,
85
- "acc_stderr,none": 0.008125578442487916,
86
  "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
87
  },
88
  "blimp_determiner_noun_agreement_with_adjective_1": {
89
- "acc,none": 0.971,
90
- "acc_stderr,none": 0.005309160685756979,
91
  "alias": " - blimp_determiner_noun_agreement_with_adjective_1"
92
  },
93
  "blimp_distractor_agreement_relational_noun": {
94
- "acc,none": 0.919,
95
- "acc_stderr,none": 0.008632121032139986,
96
  "alias": " - blimp_distractor_agreement_relational_noun"
97
  },
98
  "blimp_distractor_agreement_relative_clause": {
99
- "acc,none": 0.705,
100
- "acc_stderr,none": 0.014428554438445523,
101
  "alias": " - blimp_distractor_agreement_relative_clause"
102
  },
103
  "blimp_drop_argument": {
104
- "acc,none": 0.746,
105
- "acc_stderr,none": 0.013772206565168537,
106
  "alias": " - blimp_drop_argument"
107
  },
108
  "blimp_ellipsis_n_bar_1": {
109
  "acc,none": 0.812,
110
- "acc_stderr,none": 0.012361586015103749,
111
  "alias": " - blimp_ellipsis_n_bar_1"
112
  },
113
  "blimp_ellipsis_n_bar_2": {
114
- "acc,none": 0.948,
115
- "acc_stderr,none": 0.0070246242138171325,
116
  "alias": " - blimp_ellipsis_n_bar_2"
117
  },
118
  "blimp_existential_there_object_raising": {
119
- "acc,none": 0.859,
120
- "acc_stderr,none": 0.011010914595992438,
121
  "alias": " - blimp_existential_there_object_raising"
122
  },
123
  "blimp_existential_there_quantifiers_1": {
124
- "acc,none": 0.984,
125
- "acc_stderr,none": 0.0039698563903194225,
126
  "alias": " - blimp_existential_there_quantifiers_1"
127
  },
128
  "blimp_existential_there_quantifiers_2": {
129
  "acc,none": 0.377,
130
- "acc_stderr,none": 0.01533317012577985,
131
  "alias": " - blimp_existential_there_quantifiers_2"
132
  },
133
  "blimp_existential_there_subject_raising": {
134
- "acc,none": 0.909,
135
- "acc_stderr,none": 0.009099549538400252,
136
  "alias": " - blimp_existential_there_subject_raising"
137
  },
138
  "blimp_expletive_it_object_raising": {
139
- "acc,none": 0.827,
140
- "acc_stderr,none": 0.011967214137559929,
141
  "alias": " - blimp_expletive_it_object_raising"
142
  },
143
  "blimp_inchoative": {
144
- "acc,none": 0.678,
145
- "acc_stderr,none": 0.014782913600996662,
146
  "alias": " - blimp_inchoative"
147
  },
148
  "blimp_intransitive": {
149
- "acc,none": 0.8,
150
- "acc_stderr,none": 0.012655439943366651,
151
  "alias": " - blimp_intransitive"
152
  },
153
  "blimp_irregular_past_participle_adjectives": {
@@ -156,68 +156,68 @@
156
  "alias": " - blimp_irregular_past_participle_adjectives"
157
  },
158
  "blimp_irregular_past_participle_verbs": {
159
- "acc,none": 0.912,
160
- "acc_stderr,none": 0.008963053962592081,
161
  "alias": " - blimp_irregular_past_participle_verbs"
162
  },
163
  "blimp_irregular_plural_subject_verb_agreement_1": {
164
- "acc,none": 0.93,
165
- "acc_stderr,none": 0.008072494358323508,
166
  "alias": " - blimp_irregular_plural_subject_verb_agreement_1"
167
  },
168
  "blimp_irregular_plural_subject_verb_agreement_2": {
169
- "acc,none": 0.937,
170
- "acc_stderr,none": 0.007687007876286417,
171
  "alias": " - blimp_irregular_plural_subject_verb_agreement_2"
172
  },
173
  "blimp_left_branch_island_echo_question": {
174
- "acc,none": 0.633,
175
- "acc_stderr,none": 0.015249378464171745,
176
  "alias": " - blimp_left_branch_island_echo_question"
177
  },
178
  "blimp_left_branch_island_simple_question": {
179
- "acc,none": 0.909,
180
- "acc_stderr,none": 0.009099549538400227,
181
  "alias": " - blimp_left_branch_island_simple_question"
182
  },
183
  "blimp_matrix_question_npi_licensor_present": {
184
- "acc,none": 0.617,
185
- "acc_stderr,none": 0.015380102325652713,
186
  "alias": " - blimp_matrix_question_npi_licensor_present"
187
  },
188
  "blimp_npi_present_1": {
189
- "acc,none": 0.671,
190
- "acc_stderr,none": 0.014865395385928362,
191
  "alias": " - blimp_npi_present_1"
192
  },
193
  "blimp_npi_present_2": {
194
- "acc,none": 0.735,
195
- "acc_stderr,none": 0.013963164754809953,
196
  "alias": " - blimp_npi_present_2"
197
  },
198
  "blimp_only_npi_licensor_present": {
199
- "acc,none": 0.971,
200
- "acc_stderr,none": 0.005309160685756993,
201
  "alias": " - blimp_only_npi_licensor_present"
202
  },
203
  "blimp_only_npi_scope": {
204
- "acc,none": 0.678,
205
- "acc_stderr,none": 0.014782913600996685,
206
  "alias": " - blimp_only_npi_scope"
207
  },
208
  "blimp_passive_1": {
209
- "acc,none": 0.897,
210
- "acc_stderr,none": 0.0096168333396958,
211
  "alias": " - blimp_passive_1"
212
  },
213
  "blimp_passive_2": {
214
- "acc,none": 0.909,
215
- "acc_stderr,none": 0.009099549538400236,
216
  "alias": " - blimp_passive_2"
217
  },
218
  "blimp_principle_A_c_command": {
219
- "acc,none": 0.74,
220
- "acc_stderr,none": 0.013877773329774164,
221
  "alias": " - blimp_principle_A_c_command"
222
  },
223
  "blimp_principle_A_case_1": {
@@ -226,23 +226,23 @@
226
  "alias": " - blimp_principle_A_case_1"
227
  },
228
  "blimp_principle_A_case_2": {
229
- "acc,none": 0.967,
230
- "acc_stderr,none": 0.0056518088204523705,
231
  "alias": " - blimp_principle_A_case_2"
232
  },
233
  "blimp_principle_A_domain_1": {
234
- "acc,none": 0.998,
235
- "acc_stderr,none": 0.0014135055705578,
236
  "alias": " - blimp_principle_A_domain_1"
237
  },
238
  "blimp_principle_A_domain_2": {
239
- "acc,none": 0.84,
240
- "acc_stderr,none": 0.011598902298689004,
241
  "alias": " - blimp_principle_A_domain_2"
242
  },
243
  "blimp_principle_A_domain_3": {
244
- "acc,none": 0.736,
245
- "acc_stderr,none": 0.013946271849440467,
246
  "alias": " - blimp_principle_A_domain_3"
247
  },
248
  "blimp_principle_A_reconstruction": {
@@ -251,13 +251,13 @@
251
  "alias": " - blimp_principle_A_reconstruction"
252
  },
253
  "blimp_regular_plural_subject_verb_agreement_1": {
254
- "acc,none": 0.967,
255
- "acc_stderr,none": 0.005651808820452372,
256
  "alias": " - blimp_regular_plural_subject_verb_agreement_1"
257
  },
258
  "blimp_regular_plural_subject_verb_agreement_2": {
259
- "acc,none": 0.935,
260
- "acc_stderr,none": 0.007799733061832028,
261
  "alias": " - blimp_regular_plural_subject_verb_agreement_2"
262
  },
263
  "blimp_sentential_negation_npi_licensor_present": {
@@ -266,73 +266,73 @@
266
  "alias": " - blimp_sentential_negation_npi_licensor_present"
267
  },
268
  "blimp_sentential_negation_npi_scope": {
269
- "acc,none": 0.761,
270
- "acc_stderr,none": 0.013493000446937591,
271
  "alias": " - blimp_sentential_negation_npi_scope"
272
  },
273
  "blimp_sentential_subject_island": {
274
- "acc,none": 0.562,
275
- "acc_stderr,none": 0.01569721001969469,
276
  "alias": " - blimp_sentential_subject_island"
277
  },
278
  "blimp_superlative_quantifiers_1": {
279
- "acc,none": 0.897,
280
- "acc_stderr,none": 0.009616833339695784,
281
  "alias": " - blimp_superlative_quantifiers_1"
282
  },
283
  "blimp_superlative_quantifiers_2": {
284
- "acc,none": 0.881,
285
- "acc_stderr,none": 0.010244215145336666,
286
  "alias": " - blimp_superlative_quantifiers_2"
287
  },
288
  "blimp_tough_vs_raising_1": {
289
- "acc,none": 0.669,
290
- "acc_stderr,none": 0.014888272588203934,
291
  "alias": " - blimp_tough_vs_raising_1"
292
  },
293
  "blimp_tough_vs_raising_2": {
294
- "acc,none": 0.856,
295
- "acc_stderr,none": 0.01110798754893915,
296
  "alias": " - blimp_tough_vs_raising_2"
297
  },
298
  "blimp_transitive": {
299
- "acc,none": 0.862,
300
- "acc_stderr,none": 0.010912152632504378,
301
  "alias": " - blimp_transitive"
302
  },
303
  "blimp_wh_island": {
304
- "acc,none": 0.879,
305
- "acc_stderr,none": 0.010318210380946085,
306
  "alias": " - blimp_wh_island"
307
  },
308
  "blimp_wh_questions_object_gap": {
309
- "acc,none": 0.84,
310
- "acc_stderr,none": 0.011598902298689004,
311
  "alias": " - blimp_wh_questions_object_gap"
312
  },
313
  "blimp_wh_questions_subject_gap": {
314
- "acc,none": 0.931,
315
- "acc_stderr,none": 0.00801893405031516,
316
  "alias": " - blimp_wh_questions_subject_gap"
317
  },
318
  "blimp_wh_questions_subject_gap_long_distance": {
319
- "acc,none": 0.919,
320
- "acc_stderr,none": 0.008632121032139969,
321
  "alias": " - blimp_wh_questions_subject_gap_long_distance"
322
  },
323
  "blimp_wh_vs_that_no_gap": {
324
- "acc,none": 0.973,
325
- "acc_stderr,none": 0.005128089049275289,
326
  "alias": " - blimp_wh_vs_that_no_gap"
327
  },
328
  "blimp_wh_vs_that_no_gap_long_distance": {
329
- "acc,none": 0.961,
330
- "acc_stderr,none": 0.006125072776426141,
331
  "alias": " - blimp_wh_vs_that_no_gap_long_distance"
332
  },
333
  "blimp_wh_vs_that_with_gap": {
334
- "acc,none": 0.349,
335
- "acc_stderr,none": 0.015080663991563098,
336
  "alias": " - blimp_wh_vs_that_with_gap"
337
  },
338
  "blimp_wh_vs_that_with_gap_long_distance": {
@@ -343,8 +343,8 @@
343
  },
344
  "groups": {
345
  "blimp": {
346
- "acc,none": 0.8318358208955224,
347
- "acc_stderr,none": 0.15243408065668698,
348
  "alias": "blimp"
349
  }
350
  },
@@ -2235,13 +2235,15 @@
2235
  "config": {
2236
  "model": "hf",
2237
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
2238
- "batch_size": "2",
2239
- "batch_sizes": [],
 
 
2240
  "device": null,
2241
  "use_cache": null,
2242
  "limit": null,
2243
  "bootstrap_iters": 100000,
2244
  "gen_kwargs": null
2245
  },
2246
- "git_hash": "4701655"
2247
  }
 
1
  {
2
  "results": {
3
  "blimp": {
4
+ "acc,none": 0.8316865671641791,
5
+ "acc_stderr,none": 0.1603584899107365,
6
  "alias": "blimp"
7
  },
8
  "blimp_adjunct_island": {
9
+ "acc,none": 0.904,
10
+ "acc_stderr,none": 0.009320454434783215,
11
  "alias": " - blimp_adjunct_island"
12
  },
13
  "blimp_anaphor_gender_agreement": {
 
16
  "alias": " - blimp_anaphor_gender_agreement"
17
  },
18
  "blimp_anaphor_number_agreement": {
19
+ "acc,none": 0.994,
20
+ "acc_stderr,none": 0.0024433521993298428,
21
  "alias": " - blimp_anaphor_number_agreement"
22
  },
23
  "blimp_animate_subject_passive": {
24
+ "acc,none": 0.807,
25
+ "acc_stderr,none": 0.012486268734370145,
26
  "alias": " - blimp_animate_subject_passive"
27
  },
28
  "blimp_animate_subject_trans": {
29
+ "acc,none": 0.913,
30
+ "acc_stderr,none": 0.008916866630745918,
31
  "alias": " - blimp_animate_subject_trans"
32
  },
33
  "blimp_causative": {
34
+ "acc,none": 0.728,
35
+ "acc_stderr,none": 0.014078856992462623,
36
  "alias": " - blimp_causative"
37
  },
38
  "blimp_complex_NP_island": {
 
41
  "alias": " - blimp_complex_NP_island"
42
  },
43
  "blimp_coordinate_structure_constraint_complex_left_branch": {
44
+ "acc,none": 0.82,
45
+ "acc_stderr,none": 0.012155153135511949,
46
  "alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
47
  },
48
  "blimp_coordinate_structure_constraint_object_extraction": {
49
+ "acc,none": 0.891,
50
+ "acc_stderr,none": 0.009859828407037188,
51
  "alias": " - blimp_coordinate_structure_constraint_object_extraction"
52
  },
53
  "blimp_determiner_noun_agreement_1": {
54
  "acc,none": 0.986,
55
+ "acc_stderr,none": 0.0037172325482565877,
56
  "alias": " - blimp_determiner_noun_agreement_1"
57
  },
58
  "blimp_determiner_noun_agreement_2": {
59
+ "acc,none": 0.973,
60
+ "acc_stderr,none": 0.005128089049275288,
61
  "alias": " - blimp_determiner_noun_agreement_2"
62
  },
63
  "blimp_determiner_noun_agreement_irregular_1": {
64
+ "acc,none": 0.933,
65
+ "acc_stderr,none": 0.00791034598317755,
66
  "alias": " - blimp_determiner_noun_agreement_irregular_1"
67
  },
68
  "blimp_determiner_noun_agreement_irregular_2": {
69
+ "acc,none": 0.954,
70
+ "acc_stderr,none": 0.006627814717380719,
71
  "alias": " - blimp_determiner_noun_agreement_irregular_2"
72
  },
73
  "blimp_determiner_noun_agreement_with_adj_2": {
74
+ "acc,none": 0.95,
75
+ "acc_stderr,none": 0.0068954729748979,
76
  "alias": " - blimp_determiner_noun_agreement_with_adj_2"
77
  },
78
  "blimp_determiner_noun_agreement_with_adj_irregular_1": {
79
+ "acc,none": 0.879,
80
+ "acc_stderr,none": 0.010318210380946088,
81
  "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
82
  },
83
  "blimp_determiner_noun_agreement_with_adj_irregular_2": {
84
+ "acc,none": 0.931,
85
+ "acc_stderr,none": 0.00801893405031516,
86
  "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
87
  },
88
  "blimp_determiner_noun_agreement_with_adjective_1": {
89
+ "acc,none": 0.973,
90
+ "acc_stderr,none": 0.00512808904927529,
91
  "alias": " - blimp_determiner_noun_agreement_with_adjective_1"
92
  },
93
  "blimp_distractor_agreement_relational_noun": {
94
+ "acc,none": 0.923,
95
+ "acc_stderr,none": 0.008434580140240648,
96
  "alias": " - blimp_distractor_agreement_relational_noun"
97
  },
98
  "blimp_distractor_agreement_relative_clause": {
99
+ "acc,none": 0.717,
100
+ "acc_stderr,none": 0.014251810906481728,
101
  "alias": " - blimp_distractor_agreement_relative_clause"
102
  },
103
  "blimp_drop_argument": {
104
+ "acc,none": 0.747,
105
+ "acc_stderr,none": 0.01375427861358708,
106
  "alias": " - blimp_drop_argument"
107
  },
108
  "blimp_ellipsis_n_bar_1": {
109
  "acc,none": 0.812,
110
+ "acc_stderr,none": 0.012361586015103744,
111
  "alias": " - blimp_ellipsis_n_bar_1"
112
  },
113
  "blimp_ellipsis_n_bar_2": {
114
+ "acc,none": 0.949,
115
+ "acc_stderr,none": 0.006960420062571421,
116
  "alias": " - blimp_ellipsis_n_bar_2"
117
  },
118
  "blimp_existential_there_object_raising": {
119
+ "acc,none": 0.864,
120
+ "acc_stderr,none": 0.010845350230472988,
121
  "alias": " - blimp_existential_there_object_raising"
122
  },
123
  "blimp_existential_there_quantifiers_1": {
124
+ "acc,none": 0.985,
125
+ "acc_stderr,none": 0.0038457495745030067,
126
  "alias": " - blimp_existential_there_quantifiers_1"
127
  },
128
  "blimp_existential_there_quantifiers_2": {
129
  "acc,none": 0.377,
130
+ "acc_stderr,none": 0.015333170125779847,
131
  "alias": " - blimp_existential_there_quantifiers_2"
132
  },
133
  "blimp_existential_there_subject_raising": {
134
+ "acc,none": 0.911,
135
+ "acc_stderr,none": 0.009008893392651523,
136
  "alias": " - blimp_existential_there_subject_raising"
137
  },
138
  "blimp_expletive_it_object_raising": {
139
+ "acc,none": 0.826,
140
+ "acc_stderr,none": 0.01199449323097343,
141
  "alias": " - blimp_expletive_it_object_raising"
142
  },
143
  "blimp_inchoative": {
144
+ "acc,none": 0.68,
145
+ "acc_stderr,none": 0.014758652303574874,
146
  "alias": " - blimp_inchoative"
147
  },
148
  "blimp_intransitive": {
149
+ "acc,none": 0.791,
150
+ "acc_stderr,none": 0.012864077288499321,
151
  "alias": " - blimp_intransitive"
152
  },
153
  "blimp_irregular_past_participle_adjectives": {
 
156
  "alias": " - blimp_irregular_past_participle_adjectives"
157
  },
158
  "blimp_irregular_past_participle_verbs": {
159
+ "acc,none": 0.906,
160
+ "acc_stderr,none": 0.009233052000787736,
161
  "alias": " - blimp_irregular_past_participle_verbs"
162
  },
163
  "blimp_irregular_plural_subject_verb_agreement_1": {
164
+ "acc,none": 0.928,
165
+ "acc_stderr,none": 0.008178195576218681,
166
  "alias": " - blimp_irregular_plural_subject_verb_agreement_1"
167
  },
168
  "blimp_irregular_plural_subject_verb_agreement_2": {
169
+ "acc,none": 0.932,
170
+ "acc_stderr,none": 0.007964887911291603,
171
  "alias": " - blimp_irregular_plural_subject_verb_agreement_2"
172
  },
173
  "blimp_left_branch_island_echo_question": {
174
+ "acc,none": 0.648,
175
+ "acc_stderr,none": 0.015110404505648661,
176
  "alias": " - blimp_left_branch_island_echo_question"
177
  },
178
  "blimp_left_branch_island_simple_question": {
179
+ "acc,none": 0.911,
180
+ "acc_stderr,none": 0.009008893392651526,
181
  "alias": " - blimp_left_branch_island_simple_question"
182
  },
183
  "blimp_matrix_question_npi_licensor_present": {
184
+ "acc,none": 0.607,
185
+ "acc_stderr,none": 0.015452824654081496,
186
  "alias": " - blimp_matrix_question_npi_licensor_present"
187
  },
188
  "blimp_npi_present_1": {
189
+ "acc,none": 0.674,
190
+ "acc_stderr,none": 0.014830507204541038,
191
  "alias": " - blimp_npi_present_1"
192
  },
193
  "blimp_npi_present_2": {
194
+ "acc,none": 0.73,
195
+ "acc_stderr,none": 0.014046255632633915,
196
  "alias": " - blimp_npi_present_2"
197
  },
198
  "blimp_only_npi_licensor_present": {
199
+ "acc,none": 0.974,
200
+ "acc_stderr,none": 0.005034813735318216,
201
  "alias": " - blimp_only_npi_licensor_present"
202
  },
203
  "blimp_only_npi_scope": {
204
+ "acc,none": 0.706,
205
+ "acc_stderr,none": 0.01441429054000822,
206
  "alias": " - blimp_only_npi_scope"
207
  },
208
  "blimp_passive_1": {
209
+ "acc,none": 0.895,
210
+ "acc_stderr,none": 0.009698921026024971,
211
  "alias": " - blimp_passive_1"
212
  },
213
  "blimp_passive_2": {
214
+ "acc,none": 0.906,
215
+ "acc_stderr,none": 0.009233052000787728,
216
  "alias": " - blimp_passive_2"
217
  },
218
  "blimp_principle_A_c_command": {
219
+ "acc,none": 0.741,
220
+ "acc_stderr,none": 0.01386041525752791,
221
  "alias": " - blimp_principle_A_c_command"
222
  },
223
  "blimp_principle_A_case_1": {
 
226
  "alias": " - blimp_principle_A_case_1"
227
  },
228
  "blimp_principle_A_case_2": {
229
+ "acc,none": 0.963,
230
+ "acc_stderr,none": 0.005972157622389646,
231
  "alias": " - blimp_principle_A_case_2"
232
  },
233
  "blimp_principle_A_domain_1": {
234
+ "acc,none": 0.999,
235
+ "acc_stderr,none": 0.0010000000000000124,
236
  "alias": " - blimp_principle_A_domain_1"
237
  },
238
  "blimp_principle_A_domain_2": {
239
+ "acc,none": 0.835,
240
+ "acc_stderr,none": 0.011743632866916164,
241
  "alias": " - blimp_principle_A_domain_2"
242
  },
243
  "blimp_principle_A_domain_3": {
244
+ "acc,none": 0.739,
245
+ "acc_stderr,none": 0.013895037677965136,
246
  "alias": " - blimp_principle_A_domain_3"
247
  },
248
  "blimp_principle_A_reconstruction": {
 
251
  "alias": " - blimp_principle_A_reconstruction"
252
  },
253
  "blimp_regular_plural_subject_verb_agreement_1": {
254
+ "acc,none": 0.965,
255
+ "acc_stderr,none": 0.005814534272734963,
256
  "alias": " - blimp_regular_plural_subject_verb_agreement_1"
257
  },
258
  "blimp_regular_plural_subject_verb_agreement_2": {
259
+ "acc,none": 0.931,
260
+ "acc_stderr,none": 0.008018934050315146,
261
  "alias": " - blimp_regular_plural_subject_verb_agreement_2"
262
  },
263
  "blimp_sentential_negation_npi_licensor_present": {
 
266
  "alias": " - blimp_sentential_negation_npi_licensor_present"
267
  },
268
  "blimp_sentential_negation_npi_scope": {
269
+ "acc,none": 0.759,
270
+ "acc_stderr,none": 0.013531522534515419,
271
  "alias": " - blimp_sentential_negation_npi_scope"
272
  },
273
  "blimp_sentential_subject_island": {
274
+ "acc,none": 0.559,
275
+ "acc_stderr,none": 0.01570877989424268,
276
  "alias": " - blimp_sentential_subject_island"
277
  },
278
  "blimp_superlative_quantifiers_1": {
279
+ "acc,none": 0.892,
280
+ "acc_stderr,none": 0.009820001651345714,
281
  "alias": " - blimp_superlative_quantifiers_1"
282
  },
283
  "blimp_superlative_quantifiers_2": {
284
+ "acc,none": 0.877,
285
+ "acc_stderr,none": 0.010391293421849879,
286
  "alias": " - blimp_superlative_quantifiers_2"
287
  },
288
  "blimp_tough_vs_raising_1": {
289
+ "acc,none": 0.663,
290
+ "acc_stderr,none": 0.014955087918653603,
291
  "alias": " - blimp_tough_vs_raising_1"
292
  },
293
  "blimp_tough_vs_raising_2": {
294
+ "acc,none": 0.853,
295
+ "acc_stderr,none": 0.011203415395160335,
296
  "alias": " - blimp_tough_vs_raising_2"
297
  },
298
  "blimp_transitive": {
299
+ "acc,none": 0.867,
300
+ "acc_stderr,none": 0.010743669132397346,
301
  "alias": " - blimp_transitive"
302
  },
303
  "blimp_wh_island": {
304
+ "acc,none": 0.877,
305
+ "acc_stderr,none": 0.010391293421849877,
306
  "alias": " - blimp_wh_island"
307
  },
308
  "blimp_wh_questions_object_gap": {
309
+ "acc,none": 0.841,
310
+ "acc_stderr,none": 0.011569479368271296,
311
  "alias": " - blimp_wh_questions_object_gap"
312
  },
313
  "blimp_wh_questions_subject_gap": {
314
+ "acc,none": 0.933,
315
+ "acc_stderr,none": 0.007910345983177547,
316
  "alias": " - blimp_wh_questions_subject_gap"
317
  },
318
  "blimp_wh_questions_subject_gap_long_distance": {
319
+ "acc,none": 0.922,
320
+ "acc_stderr,none": 0.008484573530118588,
321
  "alias": " - blimp_wh_questions_subject_gap_long_distance"
322
  },
323
  "blimp_wh_vs_that_no_gap": {
324
+ "acc,none": 0.974,
325
+ "acc_stderr,none": 0.0050348137353182325,
326
  "alias": " - blimp_wh_vs_that_no_gap"
327
  },
328
  "blimp_wh_vs_that_no_gap_long_distance": {
329
+ "acc,none": 0.962,
330
+ "acc_stderr,none": 0.006049181150584946,
331
  "alias": " - blimp_wh_vs_that_no_gap_long_distance"
332
  },
333
  "blimp_wh_vs_that_with_gap": {
334
+ "acc,none": 0.341,
335
+ "acc_stderr,none": 0.0149981313484027,
336
  "alias": " - blimp_wh_vs_that_with_gap"
337
  },
338
  "blimp_wh_vs_that_with_gap_long_distance": {
 
343
  },
344
  "groups": {
345
  "blimp": {
346
+ "acc,none": 0.8316865671641791,
347
+ "acc_stderr,none": 0.1603584899107365,
348
  "alias": "blimp"
349
  }
350
  },
 
2235
  "config": {
2236
  "model": "hf",
2237
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
2238
+ "batch_size": "auto",
2239
+ "batch_sizes": [
2240
+ 64
2241
+ ],
2242
  "device": null,
2243
  "use_cache": null,
2244
  "limit": null,
2245
  "bootstrap_iters": 100000,
2246
  "gen_kwargs": null
2247
  },
2248
+ "git_hash": "2e3ceb0"
2249
  }
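As an aside on how files like the `results.json` above are produced: the `config` block records the harness invocation. A minimal sketch using the lm-evaluation-harness Python API (assuming a v0.4-style `lm_eval.simple_evaluate`; keyword names may differ between harness versions):

```python
# Sketch: regenerate a results.json like the one above with lm-evaluation-harness.
# Assumes lm-eval v0.4+; simple_evaluate's exact signature may differ by version.
import json
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
    tasks=["blimp"],         # group task; expands to the blimp_* subtasks above
    batch_size="auto",       # matches "batch_size": "auto" in the config block
    bootstrap_iters=100000,  # matches "bootstrap_iters" in the config block
)

with open("results.json", "w") as f:
    json.dump(results, f, indent=2, default=str)
```

The `bootstrap_iters` field controls the bootstrap resampling behind some of the reported `*_stderr` values.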
lm-eval-output/allenai/OLMo-7B/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e03fbfd3dd884d95a74c39c0d75f2436728da1a89c09807a16b28dab309ec385
- size 355468

  version https://git-lfs.github.com/spec/v1
+ oid sha256:2a3b311b45000c90305578cb19f065bec59f430e131dab0963128cb73e9786b4
+ size 294489
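The `taskrun.log` diffs here (and below) are not the logs themselves but Git LFS pointer files, so only the `oid sha256:` and `size` lines change when a log is replaced. A minimal sketch of parsing the three-line pointer format shown above (`parse_lfs_pointer` is a hypothetical helper, not part of any git tooling):

```python
# Sketch: parse a Git LFS pointer file of the form
# version <spec-url> / oid sha256:<hash> / size <bytes>.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),  # size of the real file, in bytes
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:2a3b311b45000c90305578cb19f065bec59f430e131dab0963128cb73e9786b4
size 294489
"""
print(parse_lfs_pointer(pointer))  # {'version': ..., 'sha256': '2a3b...', 'size': 294489}
```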
lm-eval-output/allenai/OLMo-7B/boolq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7d9982cba484eea117460519dd137bac296cc134391704e329d1546044840296
- size 1134661

lm-eval-output/allenai/OLMo-7B/boolq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,60 +0,0 @@
- {
- "results": {
- "boolq": {
- "acc,none": 0.7253822629969419,
- "acc_stderr,none": 0.007806211211206189,
- "alias": "boolq"
- }
- },
- "configs": {
- "boolq": {
- "task": "boolq",
- "group": [
- "super-glue-lm-eval-v1"
- ],
- "dataset_path": "super_glue",
- "dataset_name": "boolq",
- "training_split": "train",
- "validation_split": "validation",
- "doc_to_text": "{{passage}}\nQuestion: {{question}}?\nAnswer:",
- "doc_to_target": "label",
- "doc_to_choice": [
- "no",
- "yes"
- ],
- "description": "",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "metric_list": [
- {
- "metric": "acc"
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": true,
- "doc_to_decontamination_query": "passage",
- "metadata": {
- "version": 2.0
- }
- }
- },
- "versions": {
- "boolq": 2.0
- },
- "n-shot": {
- "boolq": 0
- },
- "config": {
- "model": "hf",
- "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
- "batch_size": "2",
- "batch_sizes": [],
- "device": null,
- "use_cache": null,
- "limit": null,
- "bootstrap_iters": 100000,
- "gen_kwargs": null
- },
- "git_hash": "4701655"
- }

lm-eval-output/allenai/OLMo-7B/boolq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c16a39eb1a979a8a5c5f87ccfccba887e9bd37bc7710687ab9db30269c8f0018
- size 19802

  version https://git-lfs.github.com/spec/v1
+ oid sha256:444fe6f0cbaf5443ee1dfa05e3d4f1806c4556054ceabcfea74b2c4eb6ee803a
+ size 21711
lm-eval-output/allenai/OLMo-7B/cb/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:cf8a16d333c03f7ae4e6f988588806b8dc0483568c8f000306fd1e55cc7779ce
- size 13905

lm-eval-output/allenai/OLMo-7B/cb/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -54,13 +54,15 @@
  "config": {
  "model": "hf",
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
- "batch_size": "2",
- "batch_sizes": [],
  "device": null,
  "use_cache": null,
  "limit": null,
  "bootstrap_iters": 100000,
  "gen_kwargs": null
  },
- "git_hash": "4701655"
  }

  "config": {
  "model": "hf",
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 16
+ ],
  "device": null,
  "use_cache": null,
  "limit": null,
  "bootstrap_iters": 100000,
  "gen_kwargs": null
  },
+ "git_hash": "2e3ceb0"
  }
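The same `"batch_size": "2"` to `"auto"` change recurs across these reruns; with `auto`, the harness probes a workable batch size and records the detected value (here 16) under `batch_sizes`. A minimal sketch of reading that detected value back out of a results file (the path below is illustrative):

```python
# Sketch: read the detected auto batch size back out of a results.json.
import json

with open("results.json") as f:  # illustrative path, not a file in this repo layout
    cfg = json.load(f)["config"]

if cfg["batch_size"] == "auto":
    # With auto batching, the harness records the probed size(s) here.
    print("detected batch sizes:", cfg["batch_sizes"])  # e.g. [16]
```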
lm-eval-output/allenai/OLMo-7B/cb/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:83fbfd9e158b5fbf92b40a3b0ac21aa5830e033698a6e6fbd1a0c00a82f4e0a3
- size 13372

  version https://git-lfs.github.com/spec/v1
+ oid sha256:239d0b8aec1eddb6f22bc82f54f5fa42e3a45de06aff4fc8e0aef720286f58fe
+ size 14061
lm-eval-output/allenai/OLMo-7B/ceval-valid/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:755b95da7d45b3a97ad51d88b7a6f6ab085c43413f8d7099119bf26337c959e1
- size 321200

lm-eval-output/allenai/OLMo-7B/ceval-valid/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json DELETED
@@ -1,2588 +0,0 @@
- {
- "results": {
- "ceval-valid": {
- "acc,none": 0.24962852897473997,
- "acc_stderr,none": 0.11387085890117267,
- "acc_norm,none": 0.24962852897473997,
- "acc_norm_stderr,none": 0.11387085890117267,
- "alias": "ceval-valid"
- },
- "ceval-valid_accountant": {
- "acc,none": 0.22448979591836735,
- "acc_stderr,none": 0.06022425581505364,
- "acc_norm,none": 0.22448979591836735,
- "acc_norm_stderr,none": 0.06022425581505364,
- "alias": " - ceval-valid_accountant"
- },
- "ceval-valid_advanced_mathematics": {
- "acc,none": 0.10526315789473684,
- "acc_stderr,none": 0.0723351864143449,
- "acc_norm,none": 0.10526315789473684,
- "acc_norm_stderr,none": 0.0723351864143449,
- "alias": " - ceval-valid_advanced_mathematics"
- },
- "ceval-valid_art_studies": {
- "acc,none": 0.30303030303030304,
- "acc_stderr,none": 0.08124094920275463,
- "acc_norm,none": 0.30303030303030304,
- "acc_norm_stderr,none": 0.08124094920275463,
- "alias": " - ceval-valid_art_studies"
- },
- "ceval-valid_basic_medicine": {
- "acc,none": 0.15789473684210525,
- "acc_stderr,none": 0.08594700851870798,
- "acc_norm,none": 0.15789473684210525,
- "acc_norm_stderr,none": 0.08594700851870798,
- "alias": " - ceval-valid_basic_medicine"
- },
- "ceval-valid_business_administration": {
- "acc,none": 0.21212121212121213,
- "acc_stderr,none": 0.07226812131946557,
- "acc_norm,none": 0.21212121212121213,
- "acc_norm_stderr,none": 0.07226812131946557,
- "alias": " - ceval-valid_business_administration"
- },
- "ceval-valid_chinese_language_and_literature": {
- "acc,none": 0.2608695652173913,
- "acc_stderr,none": 0.09361833424764436,
- "acc_norm,none": 0.2608695652173913,
- "acc_norm_stderr,none": 0.09361833424764436,
- "alias": " - ceval-valid_chinese_language_and_literature"
- },
- "ceval-valid_civil_servant": {
- "acc,none": 0.2127659574468085,
- "acc_stderr,none": 0.060342609647735204,
- "acc_norm,none": 0.2127659574468085,
- "acc_norm_stderr,none": 0.060342609647735204,
- "alias": " - ceval-valid_civil_servant"
- },
- "ceval-valid_clinical_medicine": {
- "acc,none": 0.18181818181818182,
- "acc_stderr,none": 0.08416546361568647,
- "acc_norm,none": 0.18181818181818182,
- "acc_norm_stderr,none": 0.08416546361568647,
- "alias": " - ceval-valid_clinical_medicine"
- },
- "ceval-valid_college_chemistry": {
- "acc,none": 0.25,
- "acc_stderr,none": 0.09028938981432691,
- "acc_norm,none": 0.25,
- "acc_norm_stderr,none": 0.09028938981432691,
- "alias": " - ceval-valid_college_chemistry"
- },
- "ceval-valid_college_economics": {
- "acc,none": 0.32727272727272727,
- "acc_stderr,none": 0.0638524469869863,
- "acc_norm,none": 0.32727272727272727,
- "acc_norm_stderr,none": 0.0638524469869863,
- "alias": " - ceval-valid_college_economics"
- },
- "ceval-valid_college_physics": {
- "acc,none": 0.21052631578947367,
- "acc_stderr,none": 0.0960916767552923,
- "acc_norm,none": 0.21052631578947367,
- "acc_norm_stderr,none": 0.0960916767552923,
- "alias": " - ceval-valid_college_physics"
- },
- "ceval-valid_college_programming": {
- "acc,none": 0.24324324324324326,
- "acc_stderr,none": 0.07150679219093488,
- "acc_norm,none": 0.24324324324324326,
- "acc_norm_stderr,none": 0.07150679219093488,
- "alias": " - ceval-valid_college_programming"
- },
- "ceval-valid_computer_architecture": {
- "acc,none": 0.2857142857142857,
- "acc_stderr,none": 0.10101525445522108,
- "acc_norm,none": 0.2857142857142857,
- "acc_norm_stderr,none": 0.10101525445522108,
- "alias": " - ceval-valid_computer_architecture"
- },
- "ceval-valid_computer_network": {
- "acc,none": 0.42105263157894735,
- "acc_stderr,none": 0.11637279966159299,
- "acc_norm,none": 0.42105263157894735,
- "acc_norm_stderr,none": 0.11637279966159299,
- "alias": " - ceval-valid_computer_network"
- },
- "ceval-valid_discrete_mathematics": {
- "acc,none": 0.1875,
- "acc_stderr,none": 0.10077822185373188,
- "acc_norm,none": 0.1875,
- "acc_norm_stderr,none": 0.10077822185373188,
- "alias": " - ceval-valid_discrete_mathematics"
- },
- "ceval-valid_education_science": {
- "acc,none": 0.2413793103448276,
- "acc_stderr,none": 0.080869237238335,
- "acc_norm,none": 0.2413793103448276,
- "acc_norm_stderr,none": 0.080869237238335,
- "alias": " - ceval-valid_education_science"
- },
- "ceval-valid_electrical_engineer": {
- "acc,none": 0.35135135135135137,
- "acc_stderr,none": 0.0795654132101608,
- "acc_norm,none": 0.35135135135135137,
- "acc_norm_stderr,none": 0.0795654132101608,
- "alias": " - ceval-valid_electrical_engineer"
- },
- "ceval-valid_environmental_impact_assessment_engineer": {
- "acc,none": 0.25806451612903225,
- "acc_stderr,none": 0.07988892740217941,
- "acc_norm,none": 0.25806451612903225,
- "acc_norm_stderr,none": 0.07988892740217941,
- "alias": " - ceval-valid_environmental_impact_assessment_engineer"
- },
- "ceval-valid_fire_engineer": {
- "acc,none": 0.22580645161290322,
- "acc_stderr,none": 0.07633651333031763,
- "acc_norm,none": 0.22580645161290322,
- "acc_norm_stderr,none": 0.07633651333031763,
- "alias": " - ceval-valid_fire_engineer"
- },
- "ceval-valid_high_school_biology": {
- "acc,none": 0.21052631578947367,
- "acc_stderr,none": 0.0960916767552923,
- "acc_norm,none": 0.21052631578947367,
- "acc_norm_stderr,none": 0.0960916767552923,
- "alias": " - ceval-valid_high_school_biology"
- },
- "ceval-valid_high_school_chemistry": {
- "acc,none": 0.3684210526315789,
- "acc_stderr,none": 0.1136972052352256,
- "acc_norm,none": 0.3684210526315789,
- "acc_norm_stderr,none": 0.1136972052352256,
- "alias": " - ceval-valid_high_school_chemistry"
- },
- "ceval-valid_high_school_chinese": {
- "acc,none": 0.10526315789473684,
- "acc_stderr,none": 0.0723351864143449,
- "acc_norm,none": 0.10526315789473684,
- "acc_norm_stderr,none": 0.0723351864143449,
- "alias": " - ceval-valid_high_school_chinese"
- },
- "ceval-valid_high_school_geography": {
- "acc,none": 0.3157894736842105,
- "acc_stderr,none": 0.10956136839295434,
- "acc_norm,none": 0.3157894736842105,
- "acc_norm_stderr,none": 0.10956136839295434,
- "alias": " - ceval-valid_high_school_geography"
- },
- "ceval-valid_high_school_history": {
- "acc,none": 0.15,
- "acc_stderr,none": 0.0819178021909125,
- "acc_norm,none": 0.15,
- "acc_norm_stderr,none": 0.0819178021909125,
- "alias": " - ceval-valid_high_school_history"
- },
- "ceval-valid_high_school_mathematics": {
- "acc,none": 0.2777777777777778,
- "acc_stderr,none": 0.1086324845659782,
- "acc_norm,none": 0.2777777777777778,
- "acc_norm_stderr,none": 0.1086324845659782,
- "alias": " - ceval-valid_high_school_mathematics"
- },
- "ceval-valid_high_school_physics": {
- "acc,none": 0.3157894736842105,
- "acc_stderr,none": 0.10956136839295433,
- "acc_norm,none": 0.3157894736842105,
- "acc_norm_stderr,none": 0.10956136839295433,
- "alias": " - ceval-valid_high_school_physics"
- },
- "ceval-valid_high_school_politics": {
- "acc,none": 0.10526315789473684,
- "acc_stderr,none": 0.07233518641434492,
- "acc_norm,none": 0.10526315789473684,
- "acc_norm_stderr,none": 0.07233518641434492,
- "alias": " - ceval-valid_high_school_politics"
- },
- "ceval-valid_ideological_and_moral_cultivation": {
- "acc,none": 0.15789473684210525,
- "acc_stderr,none": 0.08594700851870798,
- "acc_norm,none": 0.15789473684210525,
- "acc_norm_stderr,none": 0.08594700851870798,
- "alias": " - ceval-valid_ideological_and_moral_cultivation"
- },
- "ceval-valid_law": {
- "acc,none": 0.3333333333333333,
- "acc_stderr,none": 0.0982946374365981,
- "acc_norm,none": 0.3333333333333333,
- "acc_norm_stderr,none": 0.0982946374365981,
- "alias": " - ceval-valid_law"
- },
- "ceval-valid_legal_professional": {
- "acc,none": 0.17391304347826086,
- "acc_stderr,none": 0.08081046758996391,
- "acc_norm,none": 0.17391304347826086,
- "acc_norm_stderr,none": 0.08081046758996391,
- "alias": " - ceval-valid_legal_professional"
- },
- "ceval-valid_logic": {
- "acc,none": 0.22727272727272727,
- "acc_stderr,none": 0.09144861547306321,
- "acc_norm,none": 0.22727272727272727,
- "acc_norm_stderr,none": 0.09144861547306321,
- "alias": " - ceval-valid_logic"
- },
- "ceval-valid_mao_zedong_thought": {
- "acc,none": 0.2916666666666667,
- "acc_stderr,none": 0.09477598811252415,
- "acc_norm,none": 0.2916666666666667,
- "acc_norm_stderr,none": 0.09477598811252415,
- "alias": " - ceval-valid_mao_zedong_thought"
- },
- "ceval-valid_marxism": {
- "acc,none": 0.21052631578947367,
- "acc_stderr,none": 0.0960916767552923,
- "acc_norm,none": 0.21052631578947367,
- "acc_norm_stderr,none": 0.0960916767552923,
- "alias": " - ceval-valid_marxism"
- },
- "ceval-valid_metrology_engineer": {
- "acc,none": 0.25,
- "acc_stderr,none": 0.09028938981432691,
- "acc_norm,none": 0.25,
- "acc_norm_stderr,none": 0.09028938981432691,
- "alias": " - ceval-valid_metrology_engineer"
- },
- "ceval-valid_middle_school_biology": {
- "acc,none": 0.23809523809523808,
- "acc_stderr,none": 0.09523809523809523,
- "acc_norm,none": 0.23809523809523808,
- "acc_norm_stderr,none": 0.09523809523809523,
- "alias": " - ceval-valid_middle_school_biology"
- },
- "ceval-valid_middle_school_chemistry": {
- "acc,none": 0.25,
- "acc_stderr,none": 0.09933992677987828,
- "acc_norm,none": 0.25,
- "acc_norm_stderr,none": 0.09933992677987828,
- "alias": " - ceval-valid_middle_school_chemistry"
- },
- "ceval-valid_middle_school_geography": {
- "acc,none": 0.08333333333333333,
- "acc_stderr,none": 0.08333333333333331,
- "acc_norm,none": 0.08333333333333333,
- "acc_norm_stderr,none": 0.08333333333333331,
- "alias": " - ceval-valid_middle_school_geography"
- },
- "ceval-valid_middle_school_history": {
- "acc,none": 0.45454545454545453,
- "acc_stderr,none": 0.10865714630312667,
- "acc_norm,none": 0.45454545454545453,
- "acc_norm_stderr,none": 0.10865714630312667,
- "alias": " - ceval-valid_middle_school_history"
- },
- "ceval-valid_middle_school_mathematics": {
- "acc,none": 0.3684210526315789,
- "acc_stderr,none": 0.11369720523522558,
- "acc_norm,none": 0.3684210526315789,
- "acc_norm_stderr,none": 0.11369720523522558,
- "alias": " - ceval-valid_middle_school_mathematics"
- },
- "ceval-valid_middle_school_physics": {
- "acc,none": 0.47368421052631576,
- "acc_stderr,none": 0.11768778828946262,
- "acc_norm,none": 0.47368421052631576,
- "acc_norm_stderr,none": 0.11768778828946262,
- "alias": " - ceval-valid_middle_school_physics"
- },
- "ceval-valid_middle_school_politics": {
- "acc,none": 0.2857142857142857,
- "acc_stderr,none": 0.10101525445522108,
- "acc_norm,none": 0.2857142857142857,
- "acc_norm_stderr,none": 0.10101525445522108,
- "alias": " - ceval-valid_middle_school_politics"
- },
- "ceval-valid_modern_chinese_history": {
- "acc,none": 0.21739130434782608,
- "acc_stderr,none": 0.08793911249520547,
- "acc_norm,none": 0.21739130434782608,
- "acc_norm_stderr,none": 0.08793911249520547,
- "alias": " - ceval-valid_modern_chinese_history"
- },
- "ceval-valid_operating_system": {
- "acc,none": 0.10526315789473684,
- "acc_stderr,none": 0.07233518641434492,
- "acc_norm,none": 0.10526315789473684,
- "acc_norm_stderr,none": 0.07233518641434492,
- "alias": " - ceval-valid_operating_system"
- },
- "ceval-valid_physician": {
- "acc,none": 0.20408163265306123,
- "acc_stderr,none": 0.05817221556628253,
- "acc_norm,none": 0.20408163265306123,
- "acc_norm_stderr,none": 0.05817221556628253,
- "alias": " - ceval-valid_physician"
- },
- "ceval-valid_plant_protection": {
- "acc,none": 0.3181818181818182,
- "acc_stderr,none": 0.10163945352271771,
- "acc_norm,none": 0.3181818181818182,
- "acc_norm_stderr,none": 0.10163945352271771,
- "alias": " - ceval-valid_plant_protection"
- },
- "ceval-valid_probability_and_statistics": {
- "acc,none": 0.2222222222222222,
- "acc_stderr,none": 0.1008316903303367,
- "acc_norm,none": 0.2222222222222222,
- "acc_norm_stderr,none": 0.1008316903303367,
- "alias": " - ceval-valid_probability_and_statistics"
- },
- "ceval-valid_professional_tour_guide": {
- "acc,none": 0.4482758620689655,
- "acc_stderr,none": 0.09398415777506855,
- "acc_norm,none": 0.4482758620689655,
- "acc_norm_stderr,none": 0.09398415777506855,
- "alias": " - ceval-valid_professional_tour_guide"
- },
- "ceval-valid_sports_science": {
- "acc,none": 0.15789473684210525,
- "acc_stderr,none": 0.08594700851870798,
- "acc_norm,none": 0.15789473684210525,
- "acc_norm_stderr,none": 0.08594700851870798,
- "alias": " - ceval-valid_sports_science"
- },
- "ceval-valid_tax_accountant": {
- "acc,none": 0.24489795918367346,
- "acc_stderr,none": 0.062069005411206316,
- "acc_norm,none": 0.24489795918367346,
- "acc_norm_stderr,none": 0.062069005411206316,
- "alias": " - ceval-valid_tax_accountant"
- },
- "ceval-valid_teacher_qualification": {
- "acc,none": 0.22727272727272727,
- "acc_stderr,none": 0.06390760676613884,
- "acc_norm,none": 0.22727272727272727,
- "acc_norm_stderr,none": 0.06390760676613884,
- "alias": " - ceval-valid_teacher_qualification"
- },
- "ceval-valid_urban_and_rural_planner": {
- "acc,none": 0.21739130434782608,
- "acc_stderr,none": 0.06148754619013454,
- "acc_norm,none": 0.21739130434782608,
- "acc_norm_stderr,none": 0.06148754619013454,
- "alias": " - ceval-valid_urban_and_rural_planner"
- },
- "ceval-valid_veterinary_medicine": {
- "acc,none": 0.13043478260869565,
- "acc_stderr,none": 0.07180198468215394,
- "acc_norm,none": 0.13043478260869565,
- "acc_norm_stderr,none": 0.07180198468215394,
- "alias": " - ceval-valid_veterinary_medicine"
- }
- },
- "groups": {
- "ceval-valid": {
- "acc,none": 0.24962852897473997,
- "acc_stderr,none": 0.11387085890117267,
- "acc_norm,none": 0.24962852897473997,
- "acc_norm_stderr,none": 0.11387085890117267,
- "alias": "ceval-valid"
- }
- },
- "configs": {
- "ceval-valid_accountant": {
- "task": "ceval-valid_accountant",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "accountant",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于注册会计师的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_advanced_mathematics": {
- "task": "ceval-valid_advanced_mathematics",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "advanced_mathematics",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高等数学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_art_studies": {
- "task": "ceval-valid_art_studies",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "art_studies",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于艺术学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_basic_medicine": {
- "task": "ceval-valid_basic_medicine",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "basic_medicine",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于基础医学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_business_administration": {
- "task": "ceval-valid_business_administration",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "business_administration",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于工商管理的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_chinese_language_and_literature": {
- "task": "ceval-valid_chinese_language_and_literature",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "chinese_language_and_literature",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于中国语言文学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_civil_servant": {
- "task": "ceval-valid_civil_servant",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "civil_servant",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于公务员的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_clinical_medicine": {
- "task": "ceval-valid_clinical_medicine",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "clinical_medicine",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于临床医学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_college_chemistry": {
- "task": "ceval-valid_college_chemistry",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "college_chemistry",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于大学化学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_college_economics": {
- "task": "ceval-valid_college_economics",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "college_economics",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于大学经济学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_college_physics": {
- "task": "ceval-valid_college_physics",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "college_physics",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于大学物理的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_college_programming": {
- "task": "ceval-valid_college_programming",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "college_programming",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于大学编程的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_computer_architecture": {
- "task": "ceval-valid_computer_architecture",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "computer_architecture",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于计算机组成的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_computer_network": {
- "task": "ceval-valid_computer_network",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "computer_network",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于计算机网络的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_discrete_mathematics": {
- "task": "ceval-valid_discrete_mathematics",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "discrete_mathematics",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于离散数学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_education_science": {
- "task": "ceval-valid_education_science",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "education_science",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于教育学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_electrical_engineer": {
- "task": "ceval-valid_electrical_engineer",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "electrical_engineer",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于注册电气工程师的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_environmental_impact_assessment_engineer": {
- "task": "ceval-valid_environmental_impact_assessment_engineer",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "environmental_impact_assessment_engineer",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于环境影响评价工程师的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_fire_engineer": {
- "task": "ceval-valid_fire_engineer",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "fire_engineer",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于注册消防工程师的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_biology": {
- "task": "ceval-valid_high_school_biology",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_biology",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中生物的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_chemistry": {
- "task": "ceval-valid_high_school_chemistry",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_chemistry",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中化学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_chinese": {
- "task": "ceval-valid_high_school_chinese",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_chinese",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中语文的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_geography": {
- "task": "ceval-valid_high_school_geography",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_geography",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中地理的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_history": {
- "task": "ceval-valid_high_school_history",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_history",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中历史的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_mathematics": {
- "task": "ceval-valid_high_school_mathematics",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_mathematics",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中数学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_physics": {
- "task": "ceval-valid_high_school_physics",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_physics",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中物理的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_high_school_politics": {
- "task": "ceval-valid_high_school_politics",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "high_school_politics",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于高中政治的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_ideological_and_moral_cultivation": {
- "task": "ceval-valid_ideological_and_moral_cultivation",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "ideological_and_moral_cultivation",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于思想道德修养与法律基础的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_law": {
- "task": "ceval-valid_law",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "law",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于法学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_legal_professional": {
- "task": "ceval-valid_legal_professional",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "legal_professional",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于法律职业资格的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
- "higher_is_better": true
- }
- ],
- "output_type": "multiple_choice",
- "repeats": 1,
- "should_decontaminate": false,
- "metadata": {
- "version": 1.0
- }
- },
- "ceval-valid_logic": {
- "task": "ceval-valid_logic",
- "group": "ceval-valid",
- "dataset_path": "ceval/ceval-exam",
- "dataset_name": "logic",
- "validation_split": "val",
- "fewshot_split": "dev",
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
- "doc_to_choice": [
- "A",
- "B",
- "C",
- "D"
- ],
- "description": "以下是中国关于逻辑学的单项选择题,请选出其中的正确答案。\n\n",
- "target_delimiter": " ",
- "fewshot_delimiter": "\n\n",
- "fewshot_config": {
- "sampler": "first_n"
- },
- "metric_list": [
- {
- "metric": "acc",
- "aggregation": "mean",
- "higher_is_better": true
- },
- {
- "metric": "acc_norm",
- "aggregation": "mean",
1615
- "higher_is_better": true
1616
- }
1617
- ],
1618
- "output_type": "multiple_choice",
1619
- "repeats": 1,
1620
- "should_decontaminate": false,
1621
- "metadata": {
1622
- "version": 1.0
1623
- }
1624
- },
1625
- "ceval-valid_mao_zedong_thought": {
1626
- "task": "ceval-valid_mao_zedong_thought",
1627
- "group": "ceval-valid",
1628
- "dataset_path": "ceval/ceval-exam",
1629
- "dataset_name": "mao_zedong_thought",
1630
- "validation_split": "val",
1631
- "fewshot_split": "dev",
1632
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1633
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1634
- "doc_to_choice": [
1635
- "A",
1636
- "B",
1637
- "C",
1638
- "D"
1639
- ],
1640
- "description": "以下是中国关于毛泽东思想和中国特色社会主义理论体系概论的单项选择题,请选出其中的正确答案。\n\n",
1641
- "target_delimiter": " ",
1642
- "fewshot_delimiter": "\n\n",
1643
- "fewshot_config": {
1644
- "sampler": "first_n"
1645
- },
1646
- "metric_list": [
1647
- {
1648
- "metric": "acc",
1649
- "aggregation": "mean",
1650
- "higher_is_better": true
1651
- },
1652
- {
1653
- "metric": "acc_norm",
1654
- "aggregation": "mean",
1655
- "higher_is_better": true
1656
- }
1657
- ],
1658
- "output_type": "multiple_choice",
1659
- "repeats": 1,
1660
- "should_decontaminate": false,
1661
- "metadata": {
1662
- "version": 1.0
1663
- }
1664
- },
1665
- "ceval-valid_marxism": {
1666
- "task": "ceval-valid_marxism",
1667
- "group": "ceval-valid",
1668
- "dataset_path": "ceval/ceval-exam",
1669
- "dataset_name": "marxism",
1670
- "validation_split": "val",
1671
- "fewshot_split": "dev",
1672
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1673
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1674
- "doc_to_choice": [
1675
- "A",
1676
- "B",
1677
- "C",
1678
- "D"
1679
- ],
1680
- "description": "以下是中国关于马克思主义基本原理的单项选择题,请选出其中的正确答案。\n\n",
1681
- "target_delimiter": " ",
1682
- "fewshot_delimiter": "\n\n",
1683
- "fewshot_config": {
1684
- "sampler": "first_n"
1685
- },
1686
- "metric_list": [
1687
- {
1688
- "metric": "acc",
1689
- "aggregation": "mean",
1690
- "higher_is_better": true
1691
- },
1692
- {
1693
- "metric": "acc_norm",
1694
- "aggregation": "mean",
1695
- "higher_is_better": true
1696
- }
1697
- ],
1698
- "output_type": "multiple_choice",
1699
- "repeats": 1,
1700
- "should_decontaminate": false,
1701
- "metadata": {
1702
- "version": 1.0
1703
- }
1704
- },
1705
- "ceval-valid_metrology_engineer": {
1706
- "task": "ceval-valid_metrology_engineer",
1707
- "group": "ceval-valid",
1708
- "dataset_path": "ceval/ceval-exam",
1709
- "dataset_name": "metrology_engineer",
1710
- "validation_split": "val",
1711
- "fewshot_split": "dev",
1712
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1713
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1714
- "doc_to_choice": [
1715
- "A",
1716
- "B",
1717
- "C",
1718
- "D"
1719
- ],
1720
- "description": "以下是中国关于注册计量师的单���选择题,请选出其中的正确答案。\n\n",
1721
- "target_delimiter": " ",
1722
- "fewshot_delimiter": "\n\n",
1723
- "fewshot_config": {
1724
- "sampler": "first_n"
1725
- },
1726
- "metric_list": [
1727
- {
1728
- "metric": "acc",
1729
- "aggregation": "mean",
1730
- "higher_is_better": true
1731
- },
1732
- {
1733
- "metric": "acc_norm",
1734
- "aggregation": "mean",
1735
- "higher_is_better": true
1736
- }
1737
- ],
1738
- "output_type": "multiple_choice",
1739
- "repeats": 1,
1740
- "should_decontaminate": false,
1741
- "metadata": {
1742
- "version": 1.0
1743
- }
1744
- },
1745
- "ceval-valid_middle_school_biology": {
1746
- "task": "ceval-valid_middle_school_biology",
1747
- "group": "ceval-valid",
1748
- "dataset_path": "ceval/ceval-exam",
1749
- "dataset_name": "middle_school_biology",
1750
- "validation_split": "val",
1751
- "fewshot_split": "dev",
1752
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1753
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1754
- "doc_to_choice": [
1755
- "A",
1756
- "B",
1757
- "C",
1758
- "D"
1759
- ],
1760
- "description": "以下是中国关于初中生物的单项选择题,请选出其中的正确答案。\n\n",
1761
- "target_delimiter": " ",
1762
- "fewshot_delimiter": "\n\n",
1763
- "fewshot_config": {
1764
- "sampler": "first_n"
1765
- },
1766
- "metric_list": [
1767
- {
1768
- "metric": "acc",
1769
- "aggregation": "mean",
1770
- "higher_is_better": true
1771
- },
1772
- {
1773
- "metric": "acc_norm",
1774
- "aggregation": "mean",
1775
- "higher_is_better": true
1776
- }
1777
- ],
1778
- "output_type": "multiple_choice",
1779
- "repeats": 1,
1780
- "should_decontaminate": false,
1781
- "metadata": {
1782
- "version": 1.0
1783
- }
1784
- },
1785
- "ceval-valid_middle_school_chemistry": {
1786
- "task": "ceval-valid_middle_school_chemistry",
1787
- "group": "ceval-valid",
1788
- "dataset_path": "ceval/ceval-exam",
1789
- "dataset_name": "middle_school_chemistry",
1790
- "validation_split": "val",
1791
- "fewshot_split": "dev",
1792
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1793
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1794
- "doc_to_choice": [
1795
- "A",
1796
- "B",
1797
- "C",
1798
- "D"
1799
- ],
1800
- "description": "以下是中国关于初中化学的单项选择题,请选出其中的正确答案。\n\n",
1801
- "target_delimiter": " ",
1802
- "fewshot_delimiter": "\n\n",
1803
- "fewshot_config": {
1804
- "sampler": "first_n"
1805
- },
1806
- "metric_list": [
1807
- {
1808
- "metric": "acc",
1809
- "aggregation": "mean",
1810
- "higher_is_better": true
1811
- },
1812
- {
1813
- "metric": "acc_norm",
1814
- "aggregation": "mean",
1815
- "higher_is_better": true
1816
- }
1817
- ],
1818
- "output_type": "multiple_choice",
1819
- "repeats": 1,
1820
- "should_decontaminate": false,
1821
- "metadata": {
1822
- "version": 1.0
1823
- }
1824
- },
1825
- "ceval-valid_middle_school_geography": {
1826
- "task": "ceval-valid_middle_school_geography",
1827
- "group": "ceval-valid",
1828
- "dataset_path": "ceval/ceval-exam",
1829
- "dataset_name": "middle_school_geography",
1830
- "validation_split": "val",
1831
- "fewshot_split": "dev",
1832
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1833
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1834
- "doc_to_choice": [
1835
- "A",
1836
- "B",
1837
- "C",
1838
- "D"
1839
- ],
1840
- "description": "以下是中国关于初中地理的单项选择题,请选出其中的正确答案。\n\n",
1841
- "target_delimiter": " ",
1842
- "fewshot_delimiter": "\n\n",
1843
- "fewshot_config": {
1844
- "sampler": "first_n"
1845
- },
1846
- "metric_list": [
1847
- {
1848
- "metric": "acc",
1849
- "aggregation": "mean",
1850
- "higher_is_better": true
1851
- },
1852
- {
1853
- "metric": "acc_norm",
1854
- "aggregation": "mean",
1855
- "higher_is_better": true
1856
- }
1857
- ],
1858
- "output_type": "multiple_choice",
1859
- "repeats": 1,
1860
- "should_decontaminate": false,
1861
- "metadata": {
1862
- "version": 1.0
1863
- }
1864
- },
1865
- "ceval-valid_middle_school_history": {
1866
- "task": "ceval-valid_middle_school_history",
1867
- "group": "ceval-valid",
1868
- "dataset_path": "ceval/ceval-exam",
1869
- "dataset_name": "middle_school_history",
1870
- "validation_split": "val",
1871
- "fewshot_split": "dev",
1872
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1873
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1874
- "doc_to_choice": [
1875
- "A",
1876
- "B",
1877
- "C",
1878
- "D"
1879
- ],
1880
- "description": "以下是中国关于初中历史的单项选择题,请选出其中的正确答案。\n\n",
1881
- "target_delimiter": " ",
1882
- "fewshot_delimiter": "\n\n",
1883
- "fewshot_config": {
1884
- "sampler": "first_n"
1885
- },
1886
- "metric_list": [
1887
- {
1888
- "metric": "acc",
1889
- "aggregation": "mean",
1890
- "higher_is_better": true
1891
- },
1892
- {
1893
- "metric": "acc_norm",
1894
- "aggregation": "mean",
1895
- "higher_is_better": true
1896
- }
1897
- ],
1898
- "output_type": "multiple_choice",
1899
- "repeats": 1,
1900
- "should_decontaminate": false,
1901
- "metadata": {
1902
- "version": 1.0
1903
- }
1904
- },
1905
- "ceval-valid_middle_school_mathematics": {
1906
- "task": "ceval-valid_middle_school_mathematics",
1907
- "group": "ceval-valid",
1908
- "dataset_path": "ceval/ceval-exam",
1909
- "dataset_name": "middle_school_mathematics",
1910
- "validation_split": "val",
1911
- "fewshot_split": "dev",
1912
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1913
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1914
- "doc_to_choice": [
1915
- "A",
1916
- "B",
1917
- "C",
1918
- "D"
1919
- ],
1920
- "description": "以下是中国关于初中数学的单项选择题,请选出其中的正确答案。\n\n",
1921
- "target_delimiter": " ",
1922
- "fewshot_delimiter": "\n\n",
1923
- "fewshot_config": {
1924
- "sampler": "first_n"
1925
- },
1926
- "metric_list": [
1927
- {
1928
- "metric": "acc",
1929
- "aggregation": "mean",
1930
- "higher_is_better": true
1931
- },
1932
- {
1933
- "metric": "acc_norm",
1934
- "aggregation": "mean",
1935
- "higher_is_better": true
1936
- }
1937
- ],
1938
- "output_type": "multiple_choice",
1939
- "repeats": 1,
1940
- "should_decontaminate": false,
1941
- "metadata": {
1942
- "version": 1.0
1943
- }
1944
- },
1945
- "ceval-valid_middle_school_physics": {
1946
- "task": "ceval-valid_middle_school_physics",
1947
- "group": "ceval-valid",
1948
- "dataset_path": "ceval/ceval-exam",
1949
- "dataset_name": "middle_school_physics",
1950
- "validation_split": "val",
1951
- "fewshot_split": "dev",
1952
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1953
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1954
- "doc_to_choice": [
1955
- "A",
1956
- "B",
1957
- "C",
1958
- "D"
1959
- ],
1960
- "description": "以下是中国关于初中物理的单项选择题,请选出其中的正确答案。\n\n",
1961
- "target_delimiter": " ",
1962
- "fewshot_delimiter": "\n\n",
1963
- "fewshot_config": {
1964
- "sampler": "first_n"
1965
- },
1966
- "metric_list": [
1967
- {
1968
- "metric": "acc",
1969
- "aggregation": "mean",
1970
- "higher_is_better": true
1971
- },
1972
- {
1973
- "metric": "acc_norm",
1974
- "aggregation": "mean",
1975
- "higher_is_better": true
1976
- }
1977
- ],
1978
- "output_type": "multiple_choice",
1979
- "repeats": 1,
1980
- "should_decontaminate": false,
1981
- "metadata": {
1982
- "version": 1.0
1983
- }
1984
- },
1985
- "ceval-valid_middle_school_politics": {
1986
- "task": "ceval-valid_middle_school_politics",
1987
- "group": "ceval-valid",
1988
- "dataset_path": "ceval/ceval-exam",
1989
- "dataset_name": "middle_school_politics",
1990
- "validation_split": "val",
1991
- "fewshot_split": "dev",
1992
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
1993
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
1994
- "doc_to_choice": [
1995
- "A",
1996
- "B",
1997
- "C",
1998
- "D"
1999
- ],
2000
- "description": "以下是中国关于初中政治的单项选择题,请选出其中的正确答案。\n\n",
2001
- "target_delimiter": " ",
2002
- "fewshot_delimiter": "\n\n",
2003
- "fewshot_config": {
2004
- "sampler": "first_n"
2005
- },
2006
- "metric_list": [
2007
- {
2008
- "metric": "acc",
2009
- "aggregation": "mean",
2010
- "higher_is_better": true
2011
- },
2012
- {
2013
- "metric": "acc_norm",
2014
- "aggregation": "mean",
2015
- "higher_is_better": true
2016
- }
2017
- ],
2018
- "output_type": "multiple_choice",
2019
- "repeats": 1,
2020
- "should_decontaminate": false,
2021
- "metadata": {
2022
- "version": 1.0
2023
- }
2024
- },
2025
- "ceval-valid_modern_chinese_history": {
2026
- "task": "ceval-valid_modern_chinese_history",
2027
- "group": "ceval-valid",
2028
- "dataset_path": "ceval/ceval-exam",
2029
- "dataset_name": "modern_chinese_history",
2030
- "validation_split": "val",
2031
- "fewshot_split": "dev",
2032
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2033
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2034
- "doc_to_choice": [
2035
- "A",
2036
- "B",
2037
- "C",
2038
- "D"
2039
- ],
2040
- "description": "以下是中国关于近代史纲要的单项选择题,请选出其中的正确答案。\n\n",
2041
- "target_delimiter": " ",
2042
- "fewshot_delimiter": "\n\n",
2043
- "fewshot_config": {
2044
- "sampler": "first_n"
2045
- },
2046
- "metric_list": [
2047
- {
2048
- "metric": "acc",
2049
- "aggregation": "mean",
2050
- "higher_is_better": true
2051
- },
2052
- {
2053
- "metric": "acc_norm",
2054
- "aggregation": "mean",
2055
- "higher_is_better": true
2056
- }
2057
- ],
2058
- "output_type": "multiple_choice",
2059
- "repeats": 1,
2060
- "should_decontaminate": false,
2061
- "metadata": {
2062
- "version": 1.0
2063
- }
2064
- },
2065
- "ceval-valid_operating_system": {
2066
- "task": "ceval-valid_operating_system",
2067
- "group": "ceval-valid",
2068
- "dataset_path": "ceval/ceval-exam",
2069
- "dataset_name": "operating_system",
2070
- "validation_split": "val",
2071
- "fewshot_split": "dev",
2072
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2073
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2074
- "doc_to_choice": [
2075
- "A",
2076
- "B",
2077
- "C",
2078
- "D"
2079
- ],
2080
- "description": "以下是中国关于操作系统的单项选择题,请选出其中的正确答案。\n\n",
2081
- "target_delimiter": " ",
2082
- "fewshot_delimiter": "\n\n",
2083
- "fewshot_config": {
2084
- "sampler": "first_n"
2085
- },
2086
- "metric_list": [
2087
- {
2088
- "metric": "acc",
2089
- "aggregation": "mean",
2090
- "higher_is_better": true
2091
- },
2092
- {
2093
- "metric": "acc_norm",
2094
- "aggregation": "mean",
2095
- "higher_is_better": true
2096
- }
2097
- ],
2098
- "output_type": "multiple_choice",
2099
- "repeats": 1,
2100
- "should_decontaminate": false,
2101
- "metadata": {
2102
- "version": 1.0
2103
- }
2104
- },
2105
- "ceval-valid_physician": {
2106
- "task": "ceval-valid_physician",
2107
- "group": "ceval-valid",
2108
- "dataset_path": "ceval/ceval-exam",
2109
- "dataset_name": "physician",
2110
- "validation_split": "val",
2111
- "fewshot_split": "dev",
2112
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2113
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2114
- "doc_to_choice": [
2115
- "A",
2116
- "B",
2117
- "C",
2118
- "D"
2119
- ],
2120
- "description": "以下是中国关于医师资格的单项选择题,请选出其中的正确答案。\n\n",
2121
- "target_delimiter": " ",
2122
- "fewshot_delimiter": "\n\n",
2123
- "fewshot_config": {
2124
- "sampler": "first_n"
2125
- },
2126
- "metric_list": [
2127
- {
2128
- "metric": "acc",
2129
- "aggregation": "mean",
2130
- "higher_is_better": true
2131
- },
2132
- {
2133
- "metric": "acc_norm",
2134
- "aggregation": "mean",
2135
- "higher_is_better": true
2136
- }
2137
- ],
2138
- "output_type": "multiple_choice",
2139
- "repeats": 1,
2140
- "should_decontaminate": false,
2141
- "metadata": {
2142
- "version": 1.0
2143
- }
2144
- },
2145
- "ceval-valid_plant_protection": {
2146
- "task": "ceval-valid_plant_protection",
2147
- "group": "ceval-valid",
2148
- "dataset_path": "ceval/ceval-exam",
2149
- "dataset_name": "plant_protection",
2150
- "validation_split": "val",
2151
- "fewshot_split": "dev",
2152
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2153
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2154
- "doc_to_choice": [
2155
- "A",
2156
- "B",
2157
- "C",
2158
- "D"
2159
- ],
2160
- "description": "以下是中国关于植物保护的单项选择题,请选出其中的正确答案。\n\n",
2161
- "target_delimiter": " ",
2162
- "fewshot_delimiter": "\n\n",
2163
- "fewshot_config": {
2164
- "sampler": "first_n"
2165
- },
2166
- "metric_list": [
2167
- {
2168
- "metric": "acc",
2169
- "aggregation": "mean",
2170
- "higher_is_better": true
2171
- },
2172
- {
2173
- "metric": "acc_norm",
2174
- "aggregation": "mean",
2175
- "higher_is_better": true
2176
- }
2177
- ],
2178
- "output_type": "multiple_choice",
2179
- "repeats": 1,
2180
- "should_decontaminate": false,
2181
- "metadata": {
2182
- "version": 1.0
2183
- }
2184
- },
2185
- "ceval-valid_probability_and_statistics": {
2186
- "task": "ceval-valid_probability_and_statistics",
2187
- "group": "ceval-valid",
2188
- "dataset_path": "ceval/ceval-exam",
2189
- "dataset_name": "probability_and_statistics",
2190
- "validation_split": "val",
2191
- "fewshot_split": "dev",
2192
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2193
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2194
- "doc_to_choice": [
2195
- "A",
2196
- "B",
2197
- "C",
2198
- "D"
2199
- ],
2200
- "description": "以下是中国关于概率统计的单项选择题,请选出其中的正确答案。\n\n",
2201
- "target_delimiter": " ",
2202
- "fewshot_delimiter": "\n\n",
2203
- "fewshot_config": {
2204
- "sampler": "first_n"
2205
- },
2206
- "metric_list": [
2207
- {
2208
- "metric": "acc",
2209
- "aggregation": "mean",
2210
- "higher_is_better": true
2211
- },
2212
- {
2213
- "metric": "acc_norm",
2214
- "aggregation": "mean",
2215
- "higher_is_better": true
2216
- }
2217
- ],
2218
- "output_type": "multiple_choice",
2219
- "repeats": 1,
2220
- "should_decontaminate": false,
2221
- "metadata": {
2222
- "version": 1.0
2223
- }
2224
- },
2225
- "ceval-valid_professional_tour_guide": {
2226
- "task": "ceval-valid_professional_tour_guide",
2227
- "group": "ceval-valid",
2228
- "dataset_path": "ceval/ceval-exam",
2229
- "dataset_name": "professional_tour_guide",
2230
- "validation_split": "val",
2231
- "fewshot_split": "dev",
2232
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2233
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2234
- "doc_to_choice": [
2235
- "A",
2236
- "B",
2237
- "C",
2238
- "D"
2239
- ],
2240
- "description": "以下是中国关于导游资格的单项选择题,请选出其中的正确答案。\n\n",
2241
- "target_delimiter": " ",
2242
- "fewshot_delimiter": "\n\n",
2243
- "fewshot_config": {
2244
- "sampler": "first_n"
2245
- },
2246
- "metric_list": [
2247
- {
2248
- "metric": "acc",
2249
- "aggregation": "mean",
2250
- "higher_is_better": true
2251
- },
2252
- {
2253
- "metric": "acc_norm",
2254
- "aggregation": "mean",
2255
- "higher_is_better": true
2256
- }
2257
- ],
2258
- "output_type": "multiple_choice",
2259
- "repeats": 1,
2260
- "should_decontaminate": false,
2261
- "metadata": {
2262
- "version": 1.0
2263
- }
2264
- },
2265
- "ceval-valid_sports_science": {
2266
- "task": "ceval-valid_sports_science",
2267
- "group": "ceval-valid",
2268
- "dataset_path": "ceval/ceval-exam",
2269
- "dataset_name": "sports_science",
2270
- "validation_split": "val",
2271
- "fewshot_split": "dev",
2272
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2273
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2274
- "doc_to_choice": [
2275
- "A",
2276
- "B",
2277
- "C",
2278
- "D"
2279
- ],
2280
- "description": "以下是中国关于体育学的单项选择题,请选出其中的正确答案。\n\n",
2281
- "target_delimiter": " ",
2282
- "fewshot_delimiter": "\n\n",
2283
- "fewshot_config": {
2284
- "sampler": "first_n"
2285
- },
2286
- "metric_list": [
2287
- {
2288
- "metric": "acc",
2289
- "aggregation": "mean",
2290
- "higher_is_better": true
2291
- },
2292
- {
2293
- "metric": "acc_norm",
2294
- "aggregation": "mean",
2295
- "higher_is_better": true
2296
- }
2297
- ],
2298
- "output_type": "multiple_choice",
2299
- "repeats": 1,
2300
- "should_decontaminate": false,
2301
- "metadata": {
2302
- "version": 1.0
2303
- }
2304
- },
2305
- "ceval-valid_tax_accountant": {
2306
- "task": "ceval-valid_tax_accountant",
2307
- "group": "ceval-valid",
2308
- "dataset_path": "ceval/ceval-exam",
2309
- "dataset_name": "tax_accountant",
2310
- "validation_split": "val",
2311
- "fewshot_split": "dev",
2312
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2313
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2314
- "doc_to_choice": [
2315
- "A",
2316
- "B",
2317
- "C",
2318
- "D"
2319
- ],
2320
- "description": "以下是中国关于税务师的单项选择题,请选出其中的正确答案。\n\n",
2321
- "target_delimiter": " ",
2322
- "fewshot_delimiter": "\n\n",
2323
- "fewshot_config": {
2324
- "sampler": "first_n"
2325
- },
2326
- "metric_list": [
2327
- {
2328
- "metric": "acc",
2329
- "aggregation": "mean",
2330
- "higher_is_better": true
2331
- },
2332
- {
2333
- "metric": "acc_norm",
2334
- "aggregation": "mean",
2335
- "higher_is_better": true
2336
- }
2337
- ],
2338
- "output_type": "multiple_choice",
2339
- "repeats": 1,
2340
- "should_decontaminate": false,
2341
- "metadata": {
2342
- "version": 1.0
2343
- }
2344
- },
2345
- "ceval-valid_teacher_qualification": {
2346
- "task": "ceval-valid_teacher_qualification",
2347
- "group": "ceval-valid",
2348
- "dataset_path": "ceval/ceval-exam",
2349
- "dataset_name": "teacher_qualification",
2350
- "validation_split": "val",
2351
- "fewshot_split": "dev",
2352
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2353
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2354
- "doc_to_choice": [
2355
- "A",
2356
- "B",
2357
- "C",
2358
- "D"
2359
- ],
2360
- "description": "以下是中国关于教师资格的单项选择题,请选出其中的正确答案。\n\n",
2361
- "target_delimiter": " ",
2362
- "fewshot_delimiter": "\n\n",
2363
- "fewshot_config": {
2364
- "sampler": "first_n"
2365
- },
2366
- "metric_list": [
2367
- {
2368
- "metric": "acc",
2369
- "aggregation": "mean",
2370
- "higher_is_better": true
2371
- },
2372
- {
2373
- "metric": "acc_norm",
2374
- "aggregation": "mean",
2375
- "higher_is_better": true
2376
- }
2377
- ],
2378
- "output_type": "multiple_choice",
2379
- "repeats": 1,
2380
- "should_decontaminate": false,
2381
- "metadata": {
2382
- "version": 1.0
2383
- }
2384
- },
2385
- "ceval-valid_urban_and_rural_planner": {
2386
- "task": "ceval-valid_urban_and_rural_planner",
2387
- "group": "ceval-valid",
2388
- "dataset_path": "ceval/ceval-exam",
2389
- "dataset_name": "urban_and_rural_planner",
2390
- "validation_split": "val",
2391
- "fewshot_split": "dev",
2392
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2393
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2394
- "doc_to_choice": [
2395
- "A",
2396
- "B",
2397
- "C",
2398
- "D"
2399
- ],
2400
- "description": "以下是中国关于注册城乡规划师的单项选择题,请选出其中的正确答案。\n\n",
2401
- "target_delimiter": " ",
2402
- "fewshot_delimiter": "\n\n",
2403
- "fewshot_config": {
2404
- "sampler": "first_n"
2405
- },
2406
- "metric_list": [
2407
- {
2408
- "metric": "acc",
2409
- "aggregation": "mean",
2410
- "higher_is_better": true
2411
- },
2412
- {
2413
- "metric": "acc_norm",
2414
- "aggregation": "mean",
2415
- "higher_is_better": true
2416
- }
2417
- ],
2418
- "output_type": "multiple_choice",
2419
- "repeats": 1,
2420
- "should_decontaminate": false,
2421
- "metadata": {
2422
- "version": 1.0
2423
- }
2424
- },
2425
- "ceval-valid_veterinary_medicine": {
2426
- "task": "ceval-valid_veterinary_medicine",
2427
- "group": "ceval-valid",
2428
- "dataset_path": "ceval/ceval-exam",
2429
- "dataset_name": "veterinary_medicine",
2430
- "validation_split": "val",
2431
- "fewshot_split": "dev",
2432
- "doc_to_text": "{{question.strip()}}\nA. {{A}}\nB. {{B}}\nC. {{C}}\nD. {{D}}\n答案:",
2433
- "doc_to_target": "{{['A', 'B', 'C', 'D'].index(answer)}}",
2434
- "doc_to_choice": [
2435
- "A",
2436
- "B",
2437
- "C",
2438
- "D"
2439
- ],
2440
- "description": "以下是中国关于兽医学的单项选择题,请选出其中的正确答案。\n\n",
2441
- "target_delimiter": " ",
2442
- "fewshot_delimiter": "\n\n",
2443
- "fewshot_config": {
2444
- "sampler": "first_n"
2445
- },
2446
- "metric_list": [
2447
- {
2448
- "metric": "acc",
2449
- "aggregation": "mean",
2450
- "higher_is_better": true
2451
- },
2452
- {
2453
- "metric": "acc_norm",
2454
- "aggregation": "mean",
2455
- "higher_is_better": true
2456
- }
2457
- ],
2458
- "output_type": "multiple_choice",
2459
- "repeats": 1,
2460
- "should_decontaminate": false,
2461
- "metadata": {
2462
- "version": 1.0
2463
- }
2464
- }
2465
- },
2466
- "versions": {
2467
- "ceval-valid": "N/A",
2468
- "ceval-valid_accountant": 1.0,
2469
- "ceval-valid_advanced_mathematics": 1.0,
2470
- "ceval-valid_art_studies": 1.0,
2471
- "ceval-valid_basic_medicine": 1.0,
2472
- "ceval-valid_business_administration": 1.0,
2473
- "ceval-valid_chinese_language_and_literature": 1.0,
2474
- "ceval-valid_civil_servant": 1.0,
2475
- "ceval-valid_clinical_medicine": 1.0,
2476
- "ceval-valid_college_chemistry": 1.0,
2477
- "ceval-valid_college_economics": 1.0,
2478
- "ceval-valid_college_physics": 1.0,
2479
- "ceval-valid_college_programming": 1.0,
2480
- "ceval-valid_computer_architecture": 1.0,
2481
- "ceval-valid_computer_network": 1.0,
2482
- "ceval-valid_discrete_mathematics": 1.0,
2483
- "ceval-valid_education_science": 1.0,
2484
- "ceval-valid_electrical_engineer": 1.0,
2485
- "ceval-valid_environmental_impact_assessment_engineer": 1.0,
2486
- "ceval-valid_fire_engineer": 1.0,
2487
- "ceval-valid_high_school_biology": 1.0,
2488
- "ceval-valid_high_school_chemistry": 1.0,
2489
- "ceval-valid_high_school_chinese": 1.0,
2490
- "ceval-valid_high_school_geography": 1.0,
2491
- "ceval-valid_high_school_history": 1.0,
2492
- "ceval-valid_high_school_mathematics": 1.0,
2493
- "ceval-valid_high_school_physics": 1.0,
2494
- "ceval-valid_high_school_politics": 1.0,
2495
- "ceval-valid_ideological_and_moral_cultivation": 1.0,
2496
- "ceval-valid_law": 1.0,
2497
- "ceval-valid_legal_professional": 1.0,
2498
- "ceval-valid_logic": 1.0,
2499
- "ceval-valid_mao_zedong_thought": 1.0,
2500
- "ceval-valid_marxism": 1.0,
2501
- "ceval-valid_metrology_engineer": 1.0,
2502
- "ceval-valid_middle_school_biology": 1.0,
2503
- "ceval-valid_middle_school_chemistry": 1.0,
2504
- "ceval-valid_middle_school_geography": 1.0,
2505
- "ceval-valid_middle_school_history": 1.0,
2506
- "ceval-valid_middle_school_mathematics": 1.0,
2507
- "ceval-valid_middle_school_physics": 1.0,
2508
- "ceval-valid_middle_school_politics": 1.0,
2509
- "ceval-valid_modern_chinese_history": 1.0,
2510
- "ceval-valid_operating_system": 1.0,
2511
- "ceval-valid_physician": 1.0,
2512
- "ceval-valid_plant_protection": 1.0,
2513
- "ceval-valid_probability_and_statistics": 1.0,
2514
- "ceval-valid_professional_tour_guide": 1.0,
2515
- "ceval-valid_sports_science": 1.0,
2516
- "ceval-valid_tax_accountant": 1.0,
2517
- "ceval-valid_teacher_qualification": 1.0,
2518
- "ceval-valid_urban_and_rural_planner": 1.0,
2519
- "ceval-valid_veterinary_medicine": 1.0
2520
- },
2521
- "n-shot": {
2522
- "ceval-valid": 0,
2523
- "ceval-valid_accountant": 0,
2524
- "ceval-valid_advanced_mathematics": 0,
2525
- "ceval-valid_art_studies": 0,
2526
- "ceval-valid_basic_medicine": 0,
2527
- "ceval-valid_business_administration": 0,
2528
- "ceval-valid_chinese_language_and_literature": 0,
2529
- "ceval-valid_civil_servant": 0,
2530
- "ceval-valid_clinical_medicine": 0,
2531
- "ceval-valid_college_chemistry": 0,
2532
- "ceval-valid_college_economics": 0,
2533
- "ceval-valid_college_physics": 0,
2534
- "ceval-valid_college_programming": 0,
2535
- "ceval-valid_computer_architecture": 0,
2536
- "ceval-valid_computer_network": 0,
2537
- "ceval-valid_discrete_mathematics": 0,
2538
- "ceval-valid_education_science": 0,
2539
- "ceval-valid_electrical_engineer": 0,
2540
- "ceval-valid_environmental_impact_assessment_engineer": 0,
2541
- "ceval-valid_fire_engineer": 0,
2542
- "ceval-valid_high_school_biology": 0,
2543
- "ceval-valid_high_school_chemistry": 0,
2544
- "ceval-valid_high_school_chinese": 0,
2545
- "ceval-valid_high_school_geography": 0,
2546
- "ceval-valid_high_school_history": 0,
2547
- "ceval-valid_high_school_mathematics": 0,
2548
- "ceval-valid_high_school_physics": 0,
2549
- "ceval-valid_high_school_politics": 0,
2550
- "ceval-valid_ideological_and_moral_cultivation": 0,
2551
- "ceval-valid_law": 0,
2552
- "ceval-valid_legal_professional": 0,
2553
- "ceval-valid_logic": 0,
2554
- "ceval-valid_mao_zedong_thought": 0,
2555
- "ceval-valid_marxism": 0,
2556
- "ceval-valid_metrology_engineer": 0,
2557
- "ceval-valid_middle_school_biology": 0,
2558
- "ceval-valid_middle_school_chemistry": 0,
2559
- "ceval-valid_middle_school_geography": 0,
2560
- "ceval-valid_middle_school_history": 0,
2561
- "ceval-valid_middle_school_mathematics": 0,
2562
- "ceval-valid_middle_school_physics": 0,
2563
- "ceval-valid_middle_school_politics": 0,
2564
- "ceval-valid_modern_chinese_history": 0,
2565
- "ceval-valid_operating_system": 0,
2566
- "ceval-valid_physician": 0,
2567
- "ceval-valid_plant_protection": 0,
2568
- "ceval-valid_probability_and_statistics": 0,
2569
- "ceval-valid_professional_tour_guide": 0,
2570
- "ceval-valid_sports_science": 0,
2571
- "ceval-valid_tax_accountant": 0,
2572
- "ceval-valid_teacher_qualification": 0,
2573
- "ceval-valid_urban_and_rural_planner": 0,
2574
- "ceval-valid_veterinary_medicine": 0
2575
- },
2576
- "config": {
2577
- "model": "hf",
2578
- "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
2579
- "batch_size": "2",
2580
- "batch_sizes": [],
2581
- "device": null,
2582
- "use_cache": null,
2583
- "limit": null,
2584
- "bootstrap_iters": 100000,
2585
- "gen_kwargs": null
2586
- },
2587
- "git_hash": "4701655"
2588
- }
lm-eval-output/allenai/OLMo-7B/ceval-valid/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b5ee2c65a8b0c7e2a23e6f37952f1caa52bc173fea074a725a634d7b0194f619
3
- size 64855
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f000647be6eea5529f603c75503e3332301c0bd4aa7f76edcbbb38001ab4cc9c
3
+ size 79965
lm-eval-output/allenai/OLMo-7B/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:d49be0dd3eb66a0025a1d0cdd953772d8bdfc3cd910d47adcca1eaa055d00007
3
- size 2305507
lm-eval-output/allenai/OLMo-7B/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,38 +1,38 @@
1
  {
2
  "results": {
3
  "cmmlu": {
4
- "acc,none": 0.2509065791745812,
5
- "acc_stderr,none": 0.046349557976842405,
6
- "acc_norm,none": 0.2509065791745812,
7
- "acc_norm_stderr,none": 0.046349557976842405,
8
  "alias": "cmmlu"
9
  },
10
  "cmmlu_agronomy": {
11
- "acc,none": 0.17751479289940827,
12
- "acc_stderr,none": 0.029479945887526282,
13
- "acc_norm,none": 0.17751479289940827,
14
- "acc_norm_stderr,none": 0.029479945887526282,
15
  "alias": " - cmmlu_agronomy"
16
  },
17
  "cmmlu_anatomy": {
18
- "acc,none": 0.24324324324324326,
19
- "acc_stderr,none": 0.0353866849031339,
20
- "acc_norm,none": 0.24324324324324326,
21
- "acc_norm_stderr,none": 0.0353866849031339,
22
  "alias": " - cmmlu_anatomy"
23
  },
24
  "cmmlu_ancient_chinese": {
25
- "acc,none": 0.23170731707317074,
26
- "acc_stderr,none": 0.033047561588107864,
27
- "acc_norm,none": 0.23170731707317074,
28
- "acc_norm_stderr,none": 0.033047561588107864,
29
  "alias": " - cmmlu_ancient_chinese"
30
  },
31
  "cmmlu_arts": {
32
- "acc,none": 0.3,
33
- "acc_stderr,none": 0.036342189215581536,
34
- "acc_norm,none": 0.3,
35
- "acc_norm_stderr,none": 0.036342189215581536,
36
  "alias": " - cmmlu_arts"
37
  },
38
  "cmmlu_astronomy": {
@@ -43,122 +43,122 @@
43
  "alias": " - cmmlu_astronomy"
44
  },
45
  "cmmlu_business_ethics": {
46
- "acc,none": 0.23923444976076555,
47
- "acc_stderr,none": 0.029580506819430464,
48
- "acc_norm,none": 0.23923444976076555,
49
- "acc_norm_stderr,none": 0.029580506819430464,
50
  "alias": " - cmmlu_business_ethics"
51
  },
52
  "cmmlu_chinese_civil_service_exam": {
53
- "acc,none": 0.20625,
54
- "acc_stderr,none": 0.03208782538184617,
55
- "acc_norm,none": 0.20625,
56
- "acc_norm_stderr,none": 0.03208782538184617,
57
  "alias": " - cmmlu_chinese_civil_service_exam"
58
  },
59
  "cmmlu_chinese_driving_rule": {
60
- "acc,none": 0.2366412213740458,
61
- "acc_stderr,none": 0.03727673575596917,
62
- "acc_norm,none": 0.2366412213740458,
63
- "acc_norm_stderr,none": 0.03727673575596917,
64
  "alias": " - cmmlu_chinese_driving_rule"
65
  },
66
  "cmmlu_chinese_food_culture": {
67
- "acc,none": 0.21323529411764705,
68
- "acc_stderr,none": 0.035252108259539325,
69
- "acc_norm,none": 0.21323529411764705,
70
- "acc_norm_stderr,none": 0.035252108259539325,
71
  "alias": " - cmmlu_chinese_food_culture"
72
  },
73
  "cmmlu_chinese_foreign_policy": {
74
- "acc,none": 0.205607476635514,
75
- "acc_stderr,none": 0.03925401580070485,
76
- "acc_norm,none": 0.205607476635514,
77
- "acc_norm_stderr,none": 0.03925401580070485,
78
  "alias": " - cmmlu_chinese_foreign_policy"
79
  },
80
  "cmmlu_chinese_history": {
81
- "acc,none": 0.2848297213622291,
82
- "acc_stderr,none": 0.02515182168617951,
83
- "acc_norm,none": 0.2848297213622291,
84
- "acc_norm_stderr,none": 0.02515182168617951,
85
  "alias": " - cmmlu_chinese_history"
86
  },
87
  "cmmlu_chinese_literature": {
88
- "acc,none": 0.2107843137254902,
89
- "acc_stderr,none": 0.02862654791243739,
90
- "acc_norm,none": 0.2107843137254902,
91
- "acc_norm_stderr,none": 0.02862654791243739,
92
  "alias": " - cmmlu_chinese_literature"
93
  },
94
  "cmmlu_chinese_teacher_qualification": {
95
- "acc,none": 0.24022346368715083,
96
- "acc_stderr,none": 0.032021424638044936,
97
- "acc_norm,none": 0.24022346368715083,
98
- "acc_norm_stderr,none": 0.032021424638044936,
99
  "alias": " - cmmlu_chinese_teacher_qualification"
100
  },
101
  "cmmlu_clinical_knowledge": {
102
- "acc,none": 0.24472573839662448,
103
- "acc_stderr,none": 0.027985699387036406,
104
- "acc_norm,none": 0.24472573839662448,
105
- "acc_norm_stderr,none": 0.027985699387036406,
106
  "alias": " - cmmlu_clinical_knowledge"
107
  },
108
  "cmmlu_college_actuarial_science": {
109
- "acc,none": 0.3018867924528302,
110
- "acc_stderr,none": 0.044801270921106716,
111
- "acc_norm,none": 0.3018867924528302,
112
- "acc_norm_stderr,none": 0.044801270921106716,
113
  "alias": " - cmmlu_college_actuarial_science"
114
  },
115
  "cmmlu_college_education": {
116
- "acc,none": 0.308411214953271,
117
- "acc_stderr,none": 0.04485760883316698,
118
- "acc_norm,none": 0.308411214953271,
119
- "acc_norm_stderr,none": 0.04485760883316698,
120
  "alias": " - cmmlu_college_education"
121
  },
122
  "cmmlu_college_engineering_hydrology": {
123
- "acc,none": 0.2830188679245283,
124
- "acc_stderr,none": 0.04396093377439375,
125
- "acc_norm,none": 0.2830188679245283,
126
- "acc_norm_stderr,none": 0.04396093377439375,
127
  "alias": " - cmmlu_college_engineering_hydrology"
128
  },
129
  "cmmlu_college_law": {
130
- "acc,none": 0.28703703703703703,
131
- "acc_stderr,none": 0.043733130409147614,
132
- "acc_norm,none": 0.28703703703703703,
133
- "acc_norm_stderr,none": 0.043733130409147614,
134
  "alias": " - cmmlu_college_law"
135
  },
136
  "cmmlu_college_mathematics": {
137
- "acc,none": 0.3142857142857143,
138
- "acc_stderr,none": 0.045521571818039494,
139
- "acc_norm,none": 0.3142857142857143,
140
- "acc_norm_stderr,none": 0.045521571818039494,
141
  "alias": " - cmmlu_college_mathematics"
142
  },
143
  "cmmlu_college_medical_statistics": {
144
- "acc,none": 0.18867924528301888,
145
- "acc_stderr,none": 0.0381824426969915,
146
- "acc_norm,none": 0.18867924528301888,
147
- "acc_norm_stderr,none": 0.0381824426969915,
148
  "alias": " - cmmlu_college_medical_statistics"
149
  },
150
  "cmmlu_college_medicine": {
151
- "acc,none": 0.21978021978021978,
152
- "acc_stderr,none": 0.025108358900325773,
153
- "acc_norm,none": 0.21978021978021978,
154
- "acc_norm_stderr,none": 0.025108358900325773,
155
  "alias": " - cmmlu_college_medicine"
156
  },
157
  "cmmlu_computer_science": {
158
- "acc,none": 0.23039215686274508,
159
- "acc_stderr,none": 0.029554292605695077,
160
- "acc_norm,none": 0.23039215686274508,
161
- "acc_norm_stderr,none": 0.029554292605695077,
162
  "alias": " - cmmlu_computer_science"
163
  },
164
  "cmmlu_computer_security": {
@@ -169,45 +169,45 @@
169
  "alias": " - cmmlu_computer_security"
170
  },
171
  "cmmlu_conceptual_physics": {
172
- "acc,none": 0.2108843537414966,
173
- "acc_stderr,none": 0.033761060398578915,
174
- "acc_norm,none": 0.2108843537414966,
175
- "acc_norm_stderr,none": 0.033761060398578915,
176
  "alias": " - cmmlu_conceptual_physics"
177
  },
178
  "cmmlu_construction_project_management": {
179
- "acc,none": 0.302158273381295,
180
- "acc_stderr,none": 0.03908914479291562,
181
- "acc_norm,none": 0.302158273381295,
182
- "acc_norm_stderr,none": 0.03908914479291562,
183
  "alias": " - cmmlu_construction_project_management"
184
  },
185
  "cmmlu_economics": {
186
- "acc,none": 0.29559748427672955,
187
- "acc_stderr,none": 0.036302143777231344,
188
- "acc_norm,none": 0.29559748427672955,
189
- "acc_norm_stderr,none": 0.036302143777231344,
190
  "alias": " - cmmlu_economics"
191
  },
192
  "cmmlu_education": {
193
- "acc,none": 0.3006134969325153,
194
- "acc_stderr,none": 0.03602511318806771,
195
- "acc_norm,none": 0.3006134969325153,
196
- "acc_norm_stderr,none": 0.03602511318806771,
197
  "alias": " - cmmlu_education"
198
  },
199
  "cmmlu_electrical_engineering": {
200
- "acc,none": 0.26744186046511625,
201
- "acc_stderr,none": 0.03384836428157859,
202
- "acc_norm,none": 0.26744186046511625,
203
- "acc_norm_stderr,none": 0.03384836428157859,
204
  "alias": " - cmmlu_electrical_engineering"
205
  },
206
  "cmmlu_elementary_chinese": {
207
- "acc,none": 0.24206349206349206,
208
- "acc_stderr,none": 0.027036109679236968,
209
- "acc_norm,none": 0.24206349206349206,
210
- "acc_norm_stderr,none": 0.027036109679236968,
211
  "alias": " - cmmlu_elementary_chinese"
212
  },
213
  "cmmlu_elementary_commonsense": {
@@ -218,10 +218,10 @@
218
  "alias": " - cmmlu_elementary_commonsense"
219
  },
220
  "cmmlu_elementary_information_and_technology": {
221
- "acc,none": 0.27310924369747897,
222
- "acc_stderr,none": 0.028942004040998167,
223
- "acc_norm,none": 0.27310924369747897,
224
- "acc_norm_stderr,none": 0.028942004040998167,
225
  "alias": " - cmmlu_elementary_information_and_technology"
226
  },
227
  "cmmlu_elementary_mathematics": {
@@ -232,66 +232,66 @@
232
  "alias": " - cmmlu_elementary_mathematics"
233
  },
234
  "cmmlu_ethnology": {
235
- "acc,none": 0.2740740740740741,
236
- "acc_stderr,none": 0.03853254836552003,
237
- "acc_norm,none": 0.2740740740740741,
238
- "acc_norm_stderr,none": 0.03853254836552003,
239
  "alias": " - cmmlu_ethnology"
240
  },
241
  "cmmlu_food_science": {
242
- "acc,none": 0.27972027972027974,
243
- "acc_stderr,none": 0.037667638895398536,
244
- "acc_norm,none": 0.27972027972027974,
245
- "acc_norm_stderr,none": 0.037667638895398536,
246
  "alias": " - cmmlu_food_science"
247
  },
248
  "cmmlu_genetics": {
249
- "acc,none": 0.2784090909090909,
250
- "acc_stderr,none": 0.03388193526335356,
251
- "acc_norm,none": 0.2784090909090909,
252
- "acc_norm_stderr,none": 0.03388193526335356,
253
  "alias": " - cmmlu_genetics"
254
  },
255
  "cmmlu_global_facts": {
256
- "acc,none": 0.24161073825503357,
257
- "acc_stderr,none": 0.03518627932594346,
258
- "acc_norm,none": 0.24161073825503357,
259
- "acc_norm_stderr,none": 0.03518627932594346,
260
  "alias": " - cmmlu_global_facts"
261
  },
262
  "cmmlu_high_school_biology": {
263
- "acc,none": 0.22485207100591717,
264
- "acc_stderr,none": 0.03220965704514525,
265
- "acc_norm,none": 0.22485207100591717,
266
- "acc_norm_stderr,none": 0.03220965704514525,
267
  "alias": " - cmmlu_high_school_biology"
268
  },
269
  "cmmlu_high_school_chemistry": {
270
- "acc,none": 0.25757575757575757,
271
- "acc_stderr,none": 0.038206998148497956,
272
- "acc_norm,none": 0.25757575757575757,
273
- "acc_norm_stderr,none": 0.038206998148497956,
274
  "alias": " - cmmlu_high_school_chemistry"
275
  },
276
  "cmmlu_high_school_geography": {
277
- "acc,none": 0.2711864406779661,
278
- "acc_stderr,none": 0.04110070549339208,
279
- "acc_norm,none": 0.2711864406779661,
280
- "acc_norm_stderr,none": 0.04110070549339208,
281
  "alias": " - cmmlu_high_school_geography"
282
  },
283
  "cmmlu_high_school_mathematics": {
284
- "acc,none": 0.27439024390243905,
285
- "acc_stderr,none": 0.03494959016177541,
286
- "acc_norm,none": 0.27439024390243905,
287
- "acc_norm_stderr,none": 0.03494959016177541,
288
  "alias": " - cmmlu_high_school_mathematics"
289
  },
290
  "cmmlu_high_school_physics": {
291
- "acc,none": 0.2636363636363636,
292
- "acc_stderr,none": 0.04220224692971987,
293
- "acc_norm,none": 0.2636363636363636,
294
- "acc_norm_stderr,none": 0.04220224692971987,
295
  "alias": " - cmmlu_high_school_physics"
296
  },
297
  "cmmlu_high_school_politics": {
@@ -302,129 +302,129 @@
302
  "alias": " - cmmlu_high_school_politics"
303
  },
304
  "cmmlu_human_sexuality": {
305
- "acc,none": 0.1984126984126984,
306
- "acc_stderr,none": 0.03567016675276863,
307
- "acc_norm,none": 0.1984126984126984,
308
- "acc_norm_stderr,none": 0.03567016675276863,
309
  "alias": " - cmmlu_human_sexuality"
310
  },
311
  "cmmlu_international_law": {
312
- "acc,none": 0.2648648648648649,
313
- "acc_stderr,none": 0.032530209055933366,
314
- "acc_norm,none": 0.2648648648648649,
315
- "acc_norm_stderr,none": 0.032530209055933366,
316
  "alias": " - cmmlu_international_law"
317
  },
318
  "cmmlu_journalism": {
319
- "acc,none": 0.20930232558139536,
320
- "acc_stderr,none": 0.031109583909764642,
321
- "acc_norm,none": 0.20930232558139536,
322
- "acc_norm_stderr,none": 0.031109583909764642,
323
  "alias": " - cmmlu_journalism"
324
  },
325
  "cmmlu_jurisprudence": {
326
- "acc,none": 0.2871046228710462,
327
- "acc_stderr,none": 0.02234297829335579,
328
- "acc_norm,none": 0.2871046228710462,
329
- "acc_norm_stderr,none": 0.02234297829335579,
330
  "alias": " - cmmlu_jurisprudence"
331
  },
332
  "cmmlu_legal_and_moral_basis": {
333
- "acc,none": 0.3130841121495327,
334
- "acc_stderr,none": 0.03177550735912672,
335
- "acc_norm,none": 0.3130841121495327,
336
- "acc_norm_stderr,none": 0.03177550735912672,
337
  "alias": " - cmmlu_legal_and_moral_basis"
338
  },
339
  "cmmlu_logical": {
340
- "acc,none": 0.2682926829268293,
341
- "acc_stderr,none": 0.040113743936211456,
342
- "acc_norm,none": 0.2682926829268293,
343
- "acc_norm_stderr,none": 0.040113743936211456,
344
  "alias": " - cmmlu_logical"
345
  },
346
  "cmmlu_machine_learning": {
347
- "acc,none": 0.3114754098360656,
348
- "acc_stderr,none": 0.0420996926731014,
349
- "acc_norm,none": 0.3114754098360656,
350
- "acc_norm_stderr,none": 0.0420996926731014,
351
  "alias": " - cmmlu_machine_learning"
352
  },
353
  "cmmlu_management": {
354
- "acc,none": 0.18571428571428572,
355
- "acc_stderr,none": 0.026899110619750637,
356
- "acc_norm,none": 0.18571428571428572,
357
- "acc_norm_stderr,none": 0.026899110619750637,
358
  "alias": " - cmmlu_management"
359
  },
360
  "cmmlu_marketing": {
361
- "acc,none": 0.2388888888888889,
362
- "acc_stderr,none": 0.03187098535605761,
363
- "acc_norm,none": 0.2388888888888889,
364
- "acc_norm_stderr,none": 0.03187098535605761,
365
  "alias": " - cmmlu_marketing"
366
  },
367
  "cmmlu_marxist_theory": {
368
- "acc,none": 0.25396825396825395,
369
- "acc_stderr,none": 0.03174603174603175,
370
- "acc_norm,none": 0.25396825396825395,
371
- "acc_norm_stderr,none": 0.03174603174603175,
372
  "alias": " - cmmlu_marxist_theory"
373
  },
374
  "cmmlu_modern_chinese": {
375
- "acc,none": 0.22413793103448276,
376
- "acc_stderr,none": 0.03888669370117824,
377
- "acc_norm,none": 0.22413793103448276,
378
- "acc_norm_stderr,none": 0.03888669370117824,
379
  "alias": " - cmmlu_modern_chinese"
380
  },
381
  "cmmlu_nutrition": {
382
- "acc,none": 0.2620689655172414,
383
- "acc_stderr,none": 0.036646663372252565,
384
- "acc_norm,none": 0.2620689655172414,
385
- "acc_norm_stderr,none": 0.036646663372252565,
386
  "alias": " - cmmlu_nutrition"
387
  },
388
  "cmmlu_philosophy": {
389
- "acc,none": 0.3142857142857143,
390
- "acc_stderr,none": 0.045521571818039494,
391
- "acc_norm,none": 0.3142857142857143,
392
- "acc_norm_stderr,none": 0.045521571818039494,
393
  "alias": " - cmmlu_philosophy"
394
  },
395
  "cmmlu_professional_accounting": {
396
- "acc,none": 0.25142857142857145,
397
- "acc_stderr,none": 0.032888897342098204,
398
- "acc_norm,none": 0.25142857142857145,
399
- "acc_norm_stderr,none": 0.032888897342098204,
400
  "alias": " - cmmlu_professional_accounting"
401
  },
402
  "cmmlu_professional_law": {
403
- "acc,none": 0.2890995260663507,
404
- "acc_stderr,none": 0.03128372390561387,
405
- "acc_norm,none": 0.2890995260663507,
406
- "acc_norm_stderr,none": 0.03128372390561387,
407
  "alias": " - cmmlu_professional_law"
408
  },
409
  "cmmlu_professional_medicine": {
410
- "acc,none": 0.2579787234042553,
411
- "acc_stderr,none": 0.022593550801056256,
412
- "acc_norm,none": 0.2579787234042553,
413
- "acc_norm_stderr,none": 0.022593550801056256,
414
  "alias": " - cmmlu_professional_medicine"
415
  },
416
  "cmmlu_professional_psychology": {
417
- "acc,none": 0.19827586206896552,
418
- "acc_stderr,none": 0.02623260459197056,
419
- "acc_norm,none": 0.19827586206896552,
420
- "acc_norm_stderr,none": 0.02623260459197056,
421
  "alias": " - cmmlu_professional_psychology"
422
  },
423
  "cmmlu_public_relations": {
424
- "acc,none": 0.27586206896551724,
425
- "acc_stderr,none": 0.03398079939585583,
426
- "acc_norm,none": 0.27586206896551724,
427
- "acc_norm_stderr,none": 0.03398079939585583,
428
  "alias": " - cmmlu_public_relations"
429
  },
430
  "cmmlu_security_study": {
@@ -435,31 +435,31 @@
435
  "alias": " - cmmlu_security_study"
436
  },
437
  "cmmlu_sociology": {
438
- "acc,none": 0.23893805309734514,
439
- "acc_stderr,none": 0.02842898832603367,
440
- "acc_norm,none": 0.23893805309734514,
441
- "acc_norm_stderr,none": 0.02842898832603367,
442
  "alias": " - cmmlu_sociology"
443
  },
444
  "cmmlu_sports_science": {
445
- "acc,none": 0.2606060606060606,
446
- "acc_stderr,none": 0.034277431758165236,
447
- "acc_norm,none": 0.2606060606060606,
448
- "acc_norm_stderr,none": 0.034277431758165236,
449
  "alias": " - cmmlu_sports_science"
450
  },
451
  "cmmlu_traditional_chinese_medicine": {
452
  "acc,none": 0.23243243243243245,
453
- "acc_stderr,none": 0.03113850517079465,
454
  "acc_norm,none": 0.23243243243243245,
455
- "acc_norm_stderr,none": 0.03113850517079465,
456
  "alias": " - cmmlu_traditional_chinese_medicine"
457
  },
458
  "cmmlu_virology": {
459
- "acc,none": 0.2485207100591716,
460
- "acc_stderr,none": 0.03334150198101965,
461
- "acc_norm,none": 0.2485207100591716,
462
- "acc_norm_stderr,none": 0.03334150198101965,
463
  "alias": " - cmmlu_virology"
464
  },
465
  "cmmlu_world_history": {
@@ -479,10 +479,10 @@
479
  },
480
  "groups": {
481
  "cmmlu": {
482
- "acc,none": 0.2509065791745812,
483
- "acc_stderr,none": 0.046349557976842405,
484
- "acc_norm,none": 0.2509065791745812,
485
- "acc_norm_stderr,none": 0.046349557976842405,
486
  "alias": "cmmlu"
487
  }
488
  },
@@ -3311,13 +3311,15 @@
3311
  "config": {
3312
  "model": "hf",
3313
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
3314
- "batch_size": "2",
3315
- "batch_sizes": [],
3316
  "device": null,
3317
  "use_cache": null,
3318
  "limit": null,
3319
  "bootstrap_iters": 100000,
3320
  "gen_kwargs": null
3321
  },
3322
- "git_hash": "4701655"
3323
  }
 
  {
  "results": {
  "cmmlu": {
+ "acc,none": 0.2485753755828009,
+ "acc_stderr,none": 0.042815306797706565,
+ "acc_norm,none": 0.2485753755828009,
+ "acc_norm_stderr,none": 0.042815306797706565,
  "alias": "cmmlu"
  },
  "cmmlu_agronomy": {
+ "acc,none": 0.1952662721893491,
+ "acc_stderr,none": 0.030583351673923103,
+ "acc_norm,none": 0.1952662721893491,
+ "acc_norm_stderr,none": 0.030583351673923103,
  "alias": " - cmmlu_agronomy"
  },
  "cmmlu_anatomy": {
+ "acc,none": 0.22297297297297297,
+ "acc_stderr,none": 0.03433092518104002,
+ "acc_norm,none": 0.22297297297297297,
+ "acc_norm_stderr,none": 0.03433092518104002,
  "alias": " - cmmlu_anatomy"
  },
  "cmmlu_ancient_chinese": {
+ "acc,none": 0.24390243902439024,
+ "acc_stderr,none": 0.033635910482728223,
+ "acc_norm,none": 0.24390243902439024,
+ "acc_norm_stderr,none": 0.033635910482728223,
  "alias": " - cmmlu_ancient_chinese"
  },
  "cmmlu_arts": {
+ "acc,none": 0.29375,
+ "acc_stderr,none": 0.03612181848191273,
+ "acc_norm,none": 0.29375,
+ "acc_norm_stderr,none": 0.03612181848191273,
  "alias": " - cmmlu_arts"
  },
  "cmmlu_astronomy": {

  "alias": " - cmmlu_astronomy"
  },
  "cmmlu_business_ethics": {
+ "acc,none": 0.23444976076555024,
+ "acc_stderr,none": 0.029375148972005737,
+ "acc_norm,none": 0.23444976076555024,
+ "acc_norm_stderr,none": 0.029375148972005737,
  "alias": " - cmmlu_business_ethics"
  },
  "cmmlu_chinese_civil_service_exam": {
+ "acc,none": 0.21875,
+ "acc_stderr,none": 0.032784644885244255,
+ "acc_norm,none": 0.21875,
+ "acc_norm_stderr,none": 0.032784644885244255,
  "alias": " - cmmlu_chinese_civil_service_exam"
  },
  "cmmlu_chinese_driving_rule": {
+ "acc,none": 0.25190839694656486,
+ "acc_stderr,none": 0.03807387116306086,
+ "acc_norm,none": 0.25190839694656486,
+ "acc_norm_stderr,none": 0.03807387116306086,
  "alias": " - cmmlu_chinese_driving_rule"
  },
  "cmmlu_chinese_food_culture": {
+ "acc,none": 0.20588235294117646,
+ "acc_stderr,none": 0.034800469312350674,
+ "acc_norm,none": 0.20588235294117646,
+ "acc_norm_stderr,none": 0.034800469312350674,
  "alias": " - cmmlu_chinese_food_culture"
  },
  "cmmlu_chinese_foreign_policy": {
+ "acc,none": 0.2336448598130841,
+ "acc_stderr,none": 0.041099848424639984,
+ "acc_norm,none": 0.2336448598130841,
+ "acc_norm_stderr,none": 0.041099848424639984,
  "alias": " - cmmlu_chinese_foreign_policy"
  },
  "cmmlu_chinese_history": {
+ "acc,none": 0.29721362229102166,
+ "acc_stderr,none": 0.025469363219004768,
+ "acc_norm,none": 0.29721362229102166,
+ "acc_norm_stderr,none": 0.025469363219004768,
  "alias": " - cmmlu_chinese_history"
  },
  "cmmlu_chinese_literature": {
+ "acc,none": 0.22058823529411764,
+ "acc_stderr,none": 0.02910225438967408,
+ "acc_norm,none": 0.22058823529411764,
+ "acc_norm_stderr,none": 0.02910225438967408,
  "alias": " - cmmlu_chinese_literature"
  },
  "cmmlu_chinese_teacher_qualification": {
+ "acc,none": 0.22346368715083798,
+ "acc_stderr,none": 0.031222980919579764,
+ "acc_norm,none": 0.22346368715083798,
+ "acc_norm_stderr,none": 0.031222980919579764,
  "alias": " - cmmlu_chinese_teacher_qualification"
  },
  "cmmlu_clinical_knowledge": {
+ "acc,none": 0.22784810126582278,
+ "acc_stderr,none": 0.027303484599069443,
+ "acc_norm,none": 0.22784810126582278,
+ "acc_norm_stderr,none": 0.027303484599069443,
  "alias": " - cmmlu_clinical_knowledge"
  },
  "cmmlu_college_actuarial_science": {
+ "acc,none": 0.2830188679245283,
+ "acc_stderr,none": 0.04396093377439375,
+ "acc_norm,none": 0.2830188679245283,
+ "acc_norm_stderr,none": 0.04396093377439375,
  "alias": " - cmmlu_college_actuarial_science"
  },
  "cmmlu_college_education": {
+ "acc,none": 0.2897196261682243,
+ "acc_stderr,none": 0.0440606533474851,
+ "acc_norm,none": 0.2897196261682243,
+ "acc_norm_stderr,none": 0.0440606533474851,
  "alias": " - cmmlu_college_education"
  },
  "cmmlu_college_engineering_hydrology": {
+ "acc,none": 0.2641509433962264,
+ "acc_stderr,none": 0.043025487739590106,
+ "acc_norm,none": 0.2641509433962264,
+ "acc_norm_stderr,none": 0.043025487739590106,
  "alias": " - cmmlu_college_engineering_hydrology"
  },
  "cmmlu_college_law": {
+ "acc,none": 0.2962962962962963,
+ "acc_stderr,none": 0.044143436668549335,
+ "acc_norm,none": 0.2962962962962963,
+ "acc_norm_stderr,none": 0.044143436668549335,
  "alias": " - cmmlu_college_law"
  },
  "cmmlu_college_mathematics": {
+ "acc,none": 0.3047619047619048,
+ "acc_stderr,none": 0.04513676718168311,
+ "acc_norm,none": 0.3047619047619048,
+ "acc_norm_stderr,none": 0.04513676718168311,
  "alias": " - cmmlu_college_mathematics"
  },
  "cmmlu_college_medical_statistics": {
+ "acc,none": 0.19811320754716982,
+ "acc_stderr,none": 0.0388972228831855,
+ "acc_norm,none": 0.19811320754716982,
+ "acc_norm_stderr,none": 0.0388972228831855,
  "alias": " - cmmlu_college_medical_statistics"
  },
  "cmmlu_college_medicine": {
+ "acc,none": 0.23809523809523808,
+ "acc_stderr,none": 0.02582505450222104,
+ "acc_norm,none": 0.23809523809523808,
+ "acc_norm_stderr,none": 0.02582505450222104,
  "alias": " - cmmlu_college_medicine"
  },
  "cmmlu_computer_science": {
+ "acc,none": 0.24509803921568626,
+ "acc_stderr,none": 0.030190282453501964,
+ "acc_norm,none": 0.24509803921568626,
+ "acc_norm_stderr,none": 0.030190282453501964,
  "alias": " - cmmlu_computer_science"
  },
  "cmmlu_computer_security": {

  "alias": " - cmmlu_computer_security"
  },
  "cmmlu_conceptual_physics": {
+ "acc,none": 0.23129251700680273,
+ "acc_stderr,none": 0.034896744812616155,
+ "acc_norm,none": 0.23129251700680273,
+ "acc_norm_stderr,none": 0.034896744812616155,
  "alias": " - cmmlu_conceptual_physics"
  },
  "cmmlu_construction_project_management": {
+ "acc,none": 0.28776978417266186,
+ "acc_stderr,none": 0.03853836179233389,
+ "acc_norm,none": 0.28776978417266186,
+ "acc_norm_stderr,none": 0.03853836179233389,
  "alias": " - cmmlu_construction_project_management"
  },
  "cmmlu_economics": {
+ "acc,none": 0.27672955974842767,
+ "acc_stderr,none": 0.03559177035707934,
+ "acc_norm,none": 0.27672955974842767,
+ "acc_norm_stderr,none": 0.03559177035707934,
  "alias": " - cmmlu_economics"
  },
  "cmmlu_education": {
+ "acc,none": 0.27607361963190186,
+ "acc_stderr,none": 0.0351238528370505,
+ "acc_norm,none": 0.27607361963190186,
+ "acc_norm_stderr,none": 0.0351238528370505,
  "alias": " - cmmlu_education"
  },
  "cmmlu_electrical_engineering": {
+ "acc,none": 0.27325581395348836,
+ "acc_stderr,none": 0.03407826167337437,
+ "acc_norm,none": 0.27325581395348836,
+ "acc_norm_stderr,none": 0.03407826167337437,
  "alias": " - cmmlu_electrical_engineering"
  },
  "cmmlu_elementary_chinese": {
+ "acc,none": 0.23412698412698413,
+ "acc_stderr,none": 0.0267280489993024,
+ "acc_norm,none": 0.23412698412698413,
+ "acc_norm_stderr,none": 0.0267280489993024,
  "alias": " - cmmlu_elementary_chinese"
  },
  "cmmlu_elementary_commonsense": {

  "alias": " - cmmlu_elementary_commonsense"
  },
  "cmmlu_elementary_information_and_technology": {
+ "acc,none": 0.24369747899159663,
+ "acc_stderr,none": 0.02788682807838058,
+ "acc_norm,none": 0.24369747899159663,
+ "acc_norm_stderr,none": 0.02788682807838058,
  "alias": " - cmmlu_elementary_information_and_technology"
  },
  "cmmlu_elementary_mathematics": {

  "alias": " - cmmlu_elementary_mathematics"
  },
  "cmmlu_ethnology": {
+ "acc,none": 0.2518518518518518,
+ "acc_stderr,none": 0.037498507091740234,
+ "acc_norm,none": 0.2518518518518518,
+ "acc_norm_stderr,none": 0.037498507091740234,
  "alias": " - cmmlu_ethnology"
  },
  "cmmlu_food_science": {
+ "acc,none": 0.2867132867132867,
+ "acc_stderr,none": 0.03795000212801782,
+ "acc_norm,none": 0.2867132867132867,
+ "acc_norm_stderr,none": 0.03795000212801782,
  "alias": " - cmmlu_food_science"
  },
  "cmmlu_genetics": {
+ "acc,none": 0.2897727272727273,
+ "acc_stderr,none": 0.034293230802398766,
+ "acc_norm,none": 0.2897727272727273,
+ "acc_norm_stderr,none": 0.034293230802398766,
  "alias": " - cmmlu_genetics"
  },
  "cmmlu_global_facts": {
+ "acc,none": 0.2483221476510067,
+ "acc_stderr,none": 0.0355134404169743,
+ "acc_norm,none": 0.2483221476510067,
+ "acc_norm_stderr,none": 0.0355134404169743,
  "alias": " - cmmlu_global_facts"
  },
  "cmmlu_high_school_biology": {
+ "acc,none": 0.23076923076923078,
+ "acc_stderr,none": 0.03250593287417369,
+ "acc_norm,none": 0.23076923076923078,
+ "acc_norm_stderr,none": 0.03250593287417369,
  "alias": " - cmmlu_high_school_biology"
  },
  "cmmlu_high_school_chemistry": {
+ "acc,none": 0.22727272727272727,
+ "acc_stderr,none": 0.03661433360410719,
+ "acc_norm,none": 0.22727272727272727,
+ "acc_norm_stderr,none": 0.03661433360410719,
  "alias": " - cmmlu_high_school_chemistry"
  },
  "cmmlu_high_school_geography": {
+ "acc,none": 0.2457627118644068,
+ "acc_stderr,none": 0.03980329854920432,
+ "acc_norm,none": 0.2457627118644068,
+ "acc_norm_stderr,none": 0.03980329854920432,
  "alias": " - cmmlu_high_school_geography"
  },
  "cmmlu_high_school_mathematics": {
+ "acc,none": 0.2621951219512195,
+ "acc_stderr,none": 0.0344500028917346,
+ "acc_norm,none": 0.2621951219512195,
+ "acc_norm_stderr,none": 0.0344500028917346,
  "alias": " - cmmlu_high_school_mathematics"
  },
  "cmmlu_high_school_physics": {
+ "acc,none": 0.2818181818181818,
+ "acc_stderr,none": 0.04309118709946458,
+ "acc_norm,none": 0.2818181818181818,
+ "acc_norm_stderr,none": 0.04309118709946458,
  "alias": " - cmmlu_high_school_physics"
  },
  "cmmlu_high_school_politics": {

  "alias": " - cmmlu_high_school_politics"
  },
  "cmmlu_human_sexuality": {
+ "acc,none": 0.19047619047619047,
+ "acc_stderr,none": 0.035122074123020534,
+ "acc_norm,none": 0.19047619047619047,
+ "acc_norm_stderr,none": 0.035122074123020534,
  "alias": " - cmmlu_human_sexuality"
  },
  "cmmlu_international_law": {
+ "acc,none": 0.25405405405405407,
+ "acc_stderr,none": 0.03209281645145386,
+ "acc_norm,none": 0.25405405405405407,
+ "acc_norm_stderr,none": 0.03209281645145386,
  "alias": " - cmmlu_international_law"
  },
  "cmmlu_journalism": {
+ "acc,none": 0.22674418604651161,
+ "acc_stderr,none": 0.032020758995849365,
+ "acc_norm,none": 0.22674418604651161,
+ "acc_norm_stderr,none": 0.032020758995849365,
  "alias": " - cmmlu_journalism"
  },
  "cmmlu_jurisprudence": {
+ "acc,none": 0.26763990267639903,
+ "acc_stderr,none": 0.021864816663672668,
+ "acc_norm,none": 0.26763990267639903,
+ "acc_norm_stderr,none": 0.021864816663672668,
  "alias": " - cmmlu_jurisprudence"
  },
  "cmmlu_legal_and_moral_basis": {
+ "acc,none": 0.3037383177570093,
+ "acc_stderr,none": 0.03150984286811783,
+ "acc_norm,none": 0.3037383177570093,
+ "acc_norm_stderr,none": 0.03150984286811783,
  "alias": " - cmmlu_legal_and_moral_basis"
  },
  "cmmlu_logical": {
+ "acc,none": 0.25203252032520324,
+ "acc_stderr,none": 0.039308795268239924,
+ "acc_norm,none": 0.25203252032520324,
+ "acc_norm_stderr,none": 0.039308795268239924,
  "alias": " - cmmlu_logical"
  },
  "cmmlu_machine_learning": {
+ "acc,none": 0.28688524590163933,
+ "acc_stderr,none": 0.041118866352671826,
+ "acc_norm,none": 0.28688524590163933,
+ "acc_norm_stderr,none": 0.041118866352671826,
  "alias": " - cmmlu_machine_learning"
  },
  "cmmlu_management": {
+ "acc,none": 0.19523809523809524,
+ "acc_stderr,none": 0.027418446398346896,
+ "acc_norm,none": 0.19523809523809524,
+ "acc_norm_stderr,none": 0.027418446398346896,
  "alias": " - cmmlu_management"
  },
  "cmmlu_marketing": {
+ "acc,none": 0.25,
+ "acc_stderr,none": 0.032364888900157734,
+ "acc_norm,none": 0.25,
+ "acc_norm_stderr,none": 0.032364888900157734,
  "alias": " - cmmlu_marketing"
  },
  "cmmlu_marxist_theory": {
+ "acc,none": 0.24338624338624337,
+ "acc_stderr,none": 0.031297251928558506,
+ "acc_norm,none": 0.24338624338624337,
+ "acc_norm_stderr,none": 0.031297251928558506,
  "alias": " - cmmlu_marxist_theory"
  },
  "cmmlu_modern_chinese": {
+ "acc,none": 0.23275862068965517,
+ "acc_stderr,none": 0.039406691683376995,
+ "acc_norm,none": 0.23275862068965517,
+ "acc_norm_stderr,none": 0.039406691683376995,
  "alias": " - cmmlu_modern_chinese"
  },
  "cmmlu_nutrition": {
+ "acc,none": 0.2689655172413793,
+ "acc_stderr,none": 0.036951833116502325,
+ "acc_norm,none": 0.2689655172413793,
+ "acc_norm_stderr,none": 0.036951833116502325,
  "alias": " - cmmlu_nutrition"
  },
  "cmmlu_philosophy": {
+ "acc,none": 0.3047619047619048,
+ "acc_stderr,none": 0.0451367671816831,
+ "acc_norm,none": 0.3047619047619048,
+ "acc_norm_stderr,none": 0.0451367671816831,
  "alias": " - cmmlu_philosophy"
  },
  "cmmlu_professional_accounting": {
+ "acc,none": 0.2342857142857143,
+ "acc_stderr,none": 0.032109360396926204,
+ "acc_norm,none": 0.2342857142857143,
+ "acc_norm_stderr,none": 0.032109360396926204,
  "alias": " - cmmlu_professional_accounting"
  },
  "cmmlu_professional_law": {
+ "acc,none": 0.2843601895734597,
+ "acc_stderr,none": 0.031129489323148667,
+ "acc_norm,none": 0.2843601895734597,
+ "acc_norm_stderr,none": 0.031129489323148667,
  "alias": " - cmmlu_professional_law"
  },
  "cmmlu_professional_medicine": {
+ "acc,none": 0.26595744680851063,
+ "acc_stderr,none": 0.022816607010135298,
+ "acc_norm,none": 0.26595744680851063,
+ "acc_norm_stderr,none": 0.022816607010135298,
  "alias": " - cmmlu_professional_medicine"
  },
  "cmmlu_professional_psychology": {
+ "acc,none": 0.23706896551724138,
+ "acc_stderr,none": 0.02798169400862497,
+ "acc_norm,none": 0.23706896551724138,
+ "acc_norm_stderr,none": 0.02798169400862497,
  "alias": " - cmmlu_professional_psychology"
  },
  "cmmlu_public_relations": {
+ "acc,none": 0.25862068965517243,
+ "acc_stderr,none": 0.033291151121447815,
+ "acc_norm,none": 0.25862068965517243,
+ "acc_norm_stderr,none": 0.033291151121447815,
  "alias": " - cmmlu_public_relations"
  },
  "cmmlu_security_study": {

  "alias": " - cmmlu_security_study"
  },
  "cmmlu_sociology": {
+ "acc,none": 0.22566371681415928,
+ "acc_stderr,none": 0.027867910955296744,
+ "acc_norm,none": 0.22566371681415928,
+ "acc_norm_stderr,none": 0.027867910955296744,
  "alias": " - cmmlu_sociology"
  },
  "cmmlu_sports_science": {
+ "acc,none": 0.23030303030303031,
+ "acc_stderr,none": 0.03287666758603489,
+ "acc_norm,none": 0.23030303030303031,
+ "acc_norm_stderr,none": 0.03287666758603489,
  "alias": " - cmmlu_sports_science"
  },
  "cmmlu_traditional_chinese_medicine": {
  "acc,none": 0.23243243243243245,
+ "acc_stderr,none": 0.031138505170794653,
  "acc_norm,none": 0.23243243243243245,
+ "acc_norm_stderr,none": 0.031138505170794653,
  "alias": " - cmmlu_traditional_chinese_medicine"
  },
  "cmmlu_virology": {
+ "acc,none": 0.24260355029585798,
+ "acc_stderr,none": 0.03307162750323179,
+ "acc_norm,none": 0.24260355029585798,
+ "acc_norm_stderr,none": 0.03307162750323179,
  "alias": " - cmmlu_virology"
  },
  "cmmlu_world_history": {

  },
  "groups": {
  "cmmlu": {
+ "acc,none": 0.2485753755828009,
+ "acc_stderr,none": 0.042815306797706565,
+ "acc_norm,none": 0.2485753755828009,
+ "acc_norm_stderr,none": 0.042815306797706565,
  "alias": "cmmlu"
  }
  },

  "config": {
  "model": "hf",
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 8
+ ],
  "device": null,
  "use_cache": null,
  "limit": null,
  "bootstrap_iters": 100000,
  "gen_kwargs": null
  },
+ "git_hash": "2e3ceb0"
  }
lm-eval-output/allenai/OLMo-7B/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3969b9c5b5f77b7d2db2d31bce13b271b0b55f53de1c06b12e1484a260cc5ea9
- size 129703
+ oid sha256:cc69f38ccd1ff5eed2fe9dbdfb6e94b5d94c6c9d0c737ef0a4f268f3b0b4733c
+ size 111568
lm-eval-output/allenai/OLMo-7B/cola/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/result-jsonl.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3862814204891a844fdf5337965d761f05144ba67d8d95ab5b6cfa8973e8bf9e
- size 59222
lm-eval-output/allenai/OLMo-7B/cola/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,8 +1,8 @@
  {
  "results": {
  "cola": {
- "mcc,none": 0.012008657795714008,
- "mcc_stderr,none": 0.03141923039277966,
+ "mcc,none": 0.003737743780434562,
+ "mcc_stderr,none": 0.031171364680531898,
  "alias": "cola"
  }
  },
@@ -46,13 +46,15 @@
  "config": {
  "model": "hf",
  "model_args": "pretrained=allenai/OLMo-7B,dtype=bfloat16,trust_remote_code=True",
- "batch_size": "2",
- "batch_sizes": [],
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
  "device": null,
  "use_cache": null,
  "limit": null,
  "bootstrap_iters": 100000,
  "gen_kwargs": null
  },
- "git_hash": "4701655"
+ "git_hash": "2e3ceb0"
  }