jbnayahu committed on
Commit c47d3ec · unverified · 1 Parent(s): c1db7ac

Signed-off-by: Jonathan Bnayahu <[email protected]>

results/bluebench/2025-06-21T08-38-27_evaluation_results.json ADDED
@@ -0,0 +1,1283 @@
+ {
+ "environment_info": {
+ "timestamp_utc": "2025-06-21T12:38:23.605149Z",
+ "command_line_invocation": [
+ "/dccstor/jbworks/miniforge3/envs/bb/bin/unitxt-evaluate",
+ "--tasks",
+ "benchmarks.bluebench",
+ "--model",
+ "cross_provider",
+ "--model_args",
+ "model_name=watsonx/meta-llama/llama-3-2-11b-vision-instruct,max_tokens=256",
+ "--output_path",
+ "./results/bluebench",
+ "--log_samples",
+ "--trust_remote_code",
+ "--batch_size",
+ "8",
+ "--verbosity",
+ "ERROR"
+ ],
+ "parsed_arguments": {
+ "tasks": [
+ "benchmarks.bluebench"
+ ],
+ "split": "test",
+ "num_fewshots": null,
+ "limit": null,
+ "batch_size": 8,
+ "model": "watsonx/meta-llama/llama-3-2-11b-vision-instruct",
+ "model_args": {
+ "max_tokens": 256
+ },
+ "gen_kwargs": null,
+ "chat_template_kwargs": null,
+ "output_path": "./results/bluebench",
+ "output_file_prefix": "evaluation_results",
+ "log_samples": true,
+ "verbosity": "ERROR",
+ "apply_chat_template": false,
+ "trust_remote_code": true,
+ "disable_hf_cache": false,
+ "cache_dir": null
+ },
+ "unitxt_version": "1.24.0",
+ "unitxt_commit_hash": "2bfd4494ec443ef86013e30d31f4860177124476",
+ "python_version": "3.10.18",
+ "system": "Linux",
+ "system_version": "#1 SMP PREEMPT_DYNAMIC Fri Aug 9 14:06:03 EDT 2024",
+ "installed_packages": {
+ "nvidia-cufile-cu12": "1.11.1.6",
+ "triton": "3.3.1",
+ "nltk": "3.9.1",
+ "anyio": "4.9.0",
+ "absl-py": "2.3.0",
+ "tiktoken": "0.9.0",
+ "charset-normalizer": "3.4.2",
+ "nvidia-cuda-runtime-cu12": "12.6.77",
+ "sympy": "1.14.0",
+ "mecab-ko": "1.0.1",
+ "litellm": "1.72.6.post1",
+ "httpcore": "1.0.9",
+ "Jinja2": "3.1.6",
+ "jsonschema-specifications": "2025.4.1",
+ "pydantic_core": "2.33.2",
+ "nvidia-cusparse-cu12": "12.5.4.2",
+ "yarl": "1.20.1",
+ "openai": "1.88.0",
+ "portalocker": "3.2.0",
+ "pandas": "2.3.0",
+ "multiprocess": "0.70.16",
+ "jsonschema": "4.24.0",
+ "unitxt": "1.24.0",
+ "nvidia-nvjitlink-cu12": "12.6.85",
+ "nvidia-cublas-cu12": "12.6.4.1",
+ "pydantic": "2.11.7",
+ "async-timeout": "5.0.1",
+ "annotated-types": "0.7.0",
+ "rouge_score": "0.1.2",
+ "contourpy": "1.3.2",
+ "aiosignal": "1.3.2",
+ "nvidia-cuda-cupti-cu12": "12.6.80",
+ "pillow": "11.2.1",
+ "six": "1.17.0",
+ "diskcache": "5.6.3",
+ "tqdm": "4.67.1",
+ "pyarrow": "20.0.0",
+ "h11": "0.16.0",
+ "zipp": "3.19.2",
+ "tzdata": "2025.2",
+ "bert-score": "0.3.13",
+ "setuptools": "80.9.0",
+ "referencing": "0.36.2",
+ "sacrebleu": "2.5.1",
+ "filelock": "3.18.0",
+ "urllib3": "2.5.0",
+ "scipy": "1.15.3",
+ "nvidia-nccl-cu12": "2.26.2",
+ "kiwisolver": "1.4.8",
+ "networkx": "3.4.2",
+ "typing-inspection": "0.4.1",
+ "lxml": "5.4.0",
+ "sniffio": "1.3.1",
+ "scikit-learn": "1.7.0",
+ "nvidia-curand-cu12": "10.3.7.77",
+ "pip": "25.1.1",
+ "fonttools": "4.58.4",
+ "transformers": "4.52.4",
+ "datasets": "3.6.0",
+ "nvidia-cusolver-cu12": "11.7.1.2",
+ "cycler": "0.12.1",
+ "evaluate": "0.4.3",
+ "distro": "1.9.0",
+ "idna": "3.10",
+ "MarkupSafe": "3.0.2",
+ "frozenlist": "1.7.0",
+ "pyparsing": "3.2.3",
+ "jiter": "0.10.0",
+ "importlib_metadata": "8.0.0",
+ "packaging": "24.2",
+ "psutil": "7.0.0",
+ "mecab-ko-dic": "1.0.0",
+ "joblib": "1.5.1",
+ "fsspec": "2025.3.0",
+ "dill": "0.3.8",
+ "tokenizers": "0.21.1",
+ "wheel": "0.45.1",
+ "nvidia-nvtx-cu12": "12.6.77",
+ "nvidia-cusparselt-cu12": "0.6.3",
+ "hf-xet": "1.1.4",
+ "propcache": "0.3.2",
+ "numpy": "2.2.6",
+ "mpmath": "1.3.0",
+ "multidict": "6.5.0",
+ "conllu": "6.0.0",
+ "safetensors": "0.5.3",
+ "requests": "2.32.4",
+ "regex": "2024.11.6",
+ "aiohttp": "3.12.13",
+ "tabulate": "0.9.0",
+ "certifi": "2025.6.15",
+ "accelerate": "1.8.0",
+ "nvidia-cufft-cu12": "11.3.0.4",
+ "nvidia-cuda-nvrtc-cu12": "12.6.77",
+ "click": "8.2.1",
+ "typing_extensions": "4.12.2",
+ "attrs": "25.3.0",
+ "exceptiongroup": "1.3.0",
+ "tenacity": "9.1.2",
+ "pytz": "2025.2",
+ "aiohappyeyeballs": "2.6.1",
+ "python-dateutil": "2.9.0.post0",
+ "torch": "2.7.1",
+ "python-dotenv": "1.1.0",
+ "httpx": "0.28.1",
+ "matplotlib": "3.10.3",
+ "xxhash": "3.5.0",
+ "PyYAML": "6.0.2",
+ "huggingface-hub": "0.33.0",
+ "colorama": "0.4.6",
+ "rpds-py": "0.25.1",
+ "threadpoolctl": "3.6.0",
+ "nvidia-cudnn-cu12": "9.5.1.17",
+ "jaraco.collections": "5.1.0",
+ "tomli": "2.0.1",
+ "backports.tarfile": "1.2.0",
+ "jaraco.context": "5.3.0",
+ "typeguard": "4.3.0",
+ "autocommand": "2.2.2",
+ "jaraco.text": "3.12.1",
+ "more-itertools": "10.3.0",
+ "platformdirs": "4.2.2",
+ "inflect": "7.3.1",
+ "jaraco.functools": "4.0.1"
+ }
+ },
+ "results": {
+ "bias": {
+ "safety_bbq_age": {
+ "accuracy": 0.7555555555555555,
+ "accuracy_ci_low": 0.6666666666666666,
+ "accuracy_ci_high": 0.8333333333333334,
+ "score_name": "accuracy",
+ "score": 0.7555555555555555,
+ "score_ci_high": 0.8333333333333334,
+ "score_ci_low": 0.6666666666666666,
+ "num_of_instances": 90
+ },
+ "safety_bbq_disability_status": {
+ "accuracy": 0.8,
+ "accuracy_ci_low": 0.7111111111111111,
+ "accuracy_ci_high": 0.8666666666666667,
+ "score_name": "accuracy",
+ "score": 0.8,
+ "score_ci_high": 0.8666666666666667,
+ "score_ci_low": 0.7111111111111111,
+ "num_of_instances": 90
+ },
+ "safety_bbq_gender_identity": {
+ "accuracy": 0.9,
+ "accuracy_ci_low": 0.8333333333333334,
+ "accuracy_ci_high": 0.9555555555555556,
+ "score_name": "accuracy",
+ "score": 0.9,
+ "score_ci_high": 0.9555555555555556,
+ "score_ci_low": 0.8333333333333334,
+ "num_of_instances": 90
+ },
+ "safety_bbq_nationality": {
+ "accuracy": 0.8,
+ "accuracy_ci_low": 0.7222222222222222,
+ "accuracy_ci_high": 0.8777777777777778,
+ "score_name": "accuracy",
+ "score": 0.8,
+ "score_ci_high": 0.8777777777777778,
+ "score_ci_low": 0.7222222222222222,
+ "num_of_instances": 90
+ },
+ "safety_bbq_physical_appearance": {
+ "accuracy": 0.8333333333333334,
+ "accuracy_ci_low": 0.7444444444444445,
+ "accuracy_ci_high": 0.9,
+ "score_name": "accuracy",
+ "score": 0.8333333333333334,
+ "score_ci_high": 0.9,
+ "score_ci_low": 0.7444444444444445,
+ "num_of_instances": 90
+ },
+ "safety_bbq_race_ethnicity": {
+ "accuracy": 0.9,
+ "accuracy_ci_low": 0.8277783447952625,
+ "accuracy_ci_high": 0.9448695638574703,
+ "score_name": "accuracy",
+ "score": 0.9,
+ "score_ci_high": 0.9448695638574703,
+ "score_ci_low": 0.8277783447952625,
+ "num_of_instances": 90
+ },
+ "safety_bbq_race_x_gender": {
+ "accuracy": 0.9777777777777777,
+ "accuracy_ci_low": 0.9222222222222223,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 0.9777777777777777,
+ "score_ci_high": 1.0,
+ "score_ci_low": 0.9222222222222223,
+ "num_of_instances": 90
+ },
+ "safety_bbq_race_x_ses": {
+ "accuracy": 0.8555555555555555,
+ "accuracy_ci_low": 0.7777777777777778,
+ "accuracy_ci_high": 0.9222222222222223,
+ "score_name": "accuracy",
+ "score": 0.8555555555555555,
+ "score_ci_high": 0.9222222222222223,
+ "score_ci_low": 0.7777777777777778,
+ "num_of_instances": 90
+ },
+ "safety_bbq_religion": {
+ "accuracy": 0.9,
+ "accuracy_ci_low": 0.822517354780987,
+ "accuracy_ci_high": 0.9444444444444444,
+ "score_name": "accuracy",
+ "score": 0.9,
+ "score_ci_high": 0.9444444444444444,
+ "score_ci_low": 0.822517354780987,
+ "num_of_instances": 90
+ },
+ "safety_bbq_ses": {
+ "accuracy": 0.8,
+ "accuracy_ci_low": 0.7111111111111111,
+ "accuracy_ci_high": 0.8777777777777778,
+ "score_name": "accuracy",
+ "score": 0.8,
+ "score_ci_high": 0.8777777777777778,
+ "score_ci_low": 0.7111111111111111,
+ "num_of_instances": 90
+ },
+ "safety_bbq_sexual_orientation": {
+ "accuracy": 0.8888888888888888,
+ "accuracy_ci_low": 0.8111111111111111,
+ "accuracy_ci_high": 0.9444444444444444,
+ "score_name": "accuracy",
+ "score": 0.8888888888888888,
+ "score_ci_high": 0.9444444444444444,
+ "score_ci_low": 0.8111111111111111,
+ "num_of_instances": 90
+ },
+ "score": 0.8555555555555555,
+ "score_name": "subsets_mean",
+ "num_of_instances": 990
+ },
+ "chatbot_abilities": {
+ "arena_hard_generation_english_gpt_4_0314_reference": {
+ "num_of_instances": 500,
+ "llama_3_70b_instruct_template_arena_hard": 0.5,
+ "score": 0.5,
+ "score_name": "llama_3_70b_instruct_template_arena_hard"
+ },
+ "score": 0.5,
+ "score_name": "subsets_mean",
+ "num_of_instances": 500
+ },
+ "entity_extraction": {
+ "universal_ner_en_ewt": {
+ "num_of_instances": 1000,
+ "f1_Person": 0.5,
+ "f1_Organization": 0.3540372670807453,
+ "f1_Location": 0.35390946502057613,
+ "f1_macro": 0.40264891070044045,
+ "recall_macro": 0.3613703019405527,
+ "precision_macro": 0.46075111602245217,
+ "in_classes_support": 0.7734806629834254,
+ "f1_micro": 0.3651685393258427,
+ "recall_micro": 0.37142857142857144,
+ "precision_micro": 0.35911602209944754,
+ "score": 0.3651685393258427,
+ "score_name": "f1_micro",
+ "score_ci_low": 0.3095577857641209,
+ "score_ci_high": 0.42280766665241554,
+ "f1_micro_ci_low": 0.3095577857641209,
+ "f1_micro_ci_high": 0.42280766665241554
+ },
+ "score": 0.3651685393258427,
+ "score_name": "subsets_mean",
+ "num_of_instances": 1000
+ },
+ "knowledge": {
+ "mmlu_pro_biology": {
+ "accuracy": 0.5492957746478874,
+ "accuracy_ci_low": 0.4445856612942033,
+ "accuracy_ci_high": 0.6619718309859155,
+ "score_name": "accuracy",
+ "score": 0.5492957746478874,
+ "score_ci_high": 0.6619718309859155,
+ "score_ci_low": 0.4445856612942033,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_business": {
+ "accuracy": 0.2676056338028169,
+ "accuracy_ci_low": 0.16901408450704225,
+ "accuracy_ci_high": 0.38028169014084506,
+ "score_name": "accuracy",
+ "score": 0.2676056338028169,
+ "score_ci_high": 0.38028169014084506,
+ "score_ci_low": 0.16901408450704225,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_chemistry": {
+ "accuracy": 0.28169014084507044,
+ "accuracy_ci_low": 0.18309859154929578,
+ "accuracy_ci_high": 0.39436619718309857,
+ "score_name": "accuracy",
+ "score": 0.28169014084507044,
+ "score_ci_high": 0.39436619718309857,
+ "score_ci_low": 0.18309859154929578,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_computer_science": {
+ "accuracy": 0.39436619718309857,
+ "accuracy_ci_low": 0.28169014084507044,
+ "accuracy_ci_high": 0.5070422535211268,
+ "score_name": "accuracy",
+ "score": 0.39436619718309857,
+ "score_ci_high": 0.5070422535211268,
+ "score_ci_low": 0.28169014084507044,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_economics": {
+ "accuracy": 0.4788732394366197,
+ "accuracy_ci_low": 0.36619718309859156,
+ "accuracy_ci_high": 0.5915492957746479,
+ "score_name": "accuracy",
+ "score": 0.4788732394366197,
+ "score_ci_high": 0.5915492957746479,
+ "score_ci_low": 0.36619718309859156,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_engineering": {
+ "accuracy": 0.23943661971830985,
+ "accuracy_ci_low": 0.14084507042253522,
+ "accuracy_ci_high": 0.3380281690140845,
+ "score_name": "accuracy",
+ "score": 0.23943661971830985,
+ "score_ci_high": 0.3380281690140845,
+ "score_ci_low": 0.14084507042253522,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_health": {
+ "accuracy": 0.43661971830985913,
+ "accuracy_ci_low": 0.323943661971831,
+ "accuracy_ci_high": 0.5492957746478874,
+ "score_name": "accuracy",
+ "score": 0.43661971830985913,
+ "score_ci_high": 0.5492957746478874,
+ "score_ci_low": 0.323943661971831,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_history": {
+ "accuracy": 0.5070422535211268,
+ "accuracy_ci_low": 0.39436619718309857,
+ "accuracy_ci_high": 0.6197183098591549,
+ "score_name": "accuracy",
+ "score": 0.5070422535211268,
+ "score_ci_high": 0.6197183098591549,
+ "score_ci_low": 0.39436619718309857,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_law": {
+ "accuracy": 0.28169014084507044,
+ "accuracy_ci_low": 0.18309859154929578,
+ "accuracy_ci_high": 0.39436619718309857,
+ "score_name": "accuracy",
+ "score": 0.28169014084507044,
+ "score_ci_high": 0.39436619718309857,
+ "score_ci_low": 0.18309859154929578,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_math": {
+ "accuracy": 0.14084507042253522,
+ "accuracy_ci_low": 0.07042253521126761,
+ "accuracy_ci_high": 0.25119134125976145,
+ "score_name": "accuracy",
+ "score": 0.14084507042253522,
+ "score_ci_high": 0.25119134125976145,
+ "score_ci_low": 0.07042253521126761,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_other": {
+ "accuracy": 0.2112676056338028,
+ "accuracy_ci_low": 0.1267605633802817,
+ "accuracy_ci_high": 0.323943661971831,
+ "score_name": "accuracy",
+ "score": 0.2112676056338028,
+ "score_ci_high": 0.323943661971831,
+ "score_ci_low": 0.1267605633802817,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_philosophy": {
+ "accuracy": 0.43661971830985913,
+ "accuracy_ci_low": 0.323943661971831,
+ "accuracy_ci_high": 0.5633802816901409,
+ "score_name": "accuracy",
+ "score": 0.43661971830985913,
+ "score_ci_high": 0.5633802816901409,
+ "score_ci_low": 0.323943661971831,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_physics": {
+ "accuracy": 0.28169014084507044,
+ "accuracy_ci_low": 0.18309859154929578,
+ "accuracy_ci_high": 0.39436619718309857,
+ "score_name": "accuracy",
+ "score": 0.28169014084507044,
+ "score_ci_high": 0.39436619718309857,
+ "score_ci_low": 0.18309859154929578,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_psychology": {
+ "accuracy": 0.5633802816901409,
+ "accuracy_ci_low": 0.43661971830985913,
+ "accuracy_ci_high": 0.676056338028169,
+ "score_name": "accuracy",
+ "score": 0.5633802816901409,
+ "score_ci_high": 0.676056338028169,
+ "score_ci_low": 0.43661971830985913,
+ "num_of_instances": 71
+ },
+ "score": 0.36217303822937624,
+ "score_name": "subsets_mean",
+ "num_of_instances": 994
+ },
+ "legal": {
+ "legalbench_abercrombie": {
+ "f1_macro": 0.5714968851810958,
+ "f1_suggestive": 0.38461538461538464,
+ "f1_fanciful": 0.4666666666666667,
+ "f1_generic": 0.8148148148148148,
+ "f1_arbitrary": 0.45454545454545453,
+ "f1_descriptive": 0.7368421052631579,
+ "f1_macro_ci_low": 0.477080408933172,
+ "f1_macro_ci_high": 0.6804913151553064,
+ "score_name": "f1_micro",
+ "score": 0.5696969696969697,
+ "score_ci_high": 0.6707317073170732,
+ "score_ci_low": 0.4662576687116564,
+ "num_of_instances": 85,
+ "accuracy": 0.5529411764705883,
+ "accuracy_ci_low": 0.4470588235294118,
+ "accuracy_ci_high": 0.6588235294117647,
+ "f1_micro": 0.5696969696969697,
+ "f1_micro_ci_low": 0.4662576687116564,
+ "f1_micro_ci_high": 0.6707317073170732
+ },
+ "legalbench_corporate_lobbying": {
+ "f1_macro": 0.6068255091609656,
+ "f1_no": 0.691358024691358,
+ "f1_yes": 0.5222929936305732,
+ "f1_macro_ci_low": 0.5410272293620183,
+ "f1_macro_ci_high": 0.6785168220322721,
+ "score_name": "f1_micro",
+ "score": 0.625,
+ "score_ci_high": 0.695,
+ "score_ci_low": 0.56,
+ "num_of_instances": 200,
+ "accuracy": 0.625,
+ "accuracy_ci_low": 0.56,
+ "accuracy_ci_high": 0.695,
+ "f1_micro": 0.625,
+ "f1_micro_ci_low": 0.56,
+ "f1_micro_ci_high": 0.695
+ },
+ "legalbench_function_of_decision_section": {
+ "f1_macro": 0.341120203240043,
+ "f1_conclusion": 0.09523809523809523,
+ "f1_analysis": 0.5046728971962616,
+ "f1_decree": 0.29411764705882354,
+ "f1_issue": 0.21818181818181817,
+ "f1_procedural history": 0.34782608695652173,
+ "f1_facts": 0.4878048780487805,
+ "f1_rule": 0.44,
+ "f1_macro_ci_low": 0.28339415146501146,
+ "f1_macro_ci_high": 0.41104677447147997,
+ "score_name": "f1_micro",
+ "score": 0.36683417085427134,
+ "score_ci_high": 0.43609022556390975,
+ "score_ci_low": 0.3057644110275689,
+ "num_of_instances": 200,
+ "accuracy": 0.365,
+ "accuracy_ci_low": 0.305,
+ "accuracy_ci_high": 0.435,
+ "f1_micro": 0.36683417085427134,
+ "f1_micro_ci_low": 0.3057644110275689,
+ "f1_micro_ci_high": 0.43609022556390975
+ },
+ "legalbench_international_citizenship_questions": {
+ "f1_macro": 0.5542857142857143,
+ "f1_yes": 0.42857142857142855,
+ "f1_no": 0.68,
+ "f1_macro_ci_low": 0.48355051877957367,
+ "f1_macro_ci_high": 0.6294200433350639,
+ "score_name": "f1_micro",
+ "score": 0.5897435897435898,
+ "score_ci_high": 0.662674821169658,
+ "score_ci_low": 0.5188702892801219,
+ "num_of_instances": 200,
+ "accuracy": 0.575,
+ "accuracy_ci_low": 0.505,
+ "accuracy_ci_high": 0.65,
+ "f1_micro": 0.5897435897435898,
+ "f1_micro_ci_low": 0.5188702892801219,
+ "f1_micro_ci_high": 0.662674821169658
+ },
+ "legalbench_proa": {
+ "f1_macro": 0.9155052264808363,
+ "f1_yes": 0.9024390243902439,
+ "f1_no": 0.9285714285714286,
+ "f1_macro_ci_low": 0.8363111281337707,
+ "f1_macro_ci_high": 0.9587803500846979,
+ "score_name": "f1_micro",
+ "score": 0.9156626506024096,
+ "score_ci_high": 0.9585798816568047,
+ "score_ci_low": 0.8372630005674523,
+ "num_of_instances": 85,
+ "accuracy": 0.8941176470588236,
+ "accuracy_ci_low": 0.8,
+ "accuracy_ci_high": 0.9466354743635627,
+ "f1_micro": 0.9156626506024096,
+ "f1_micro_ci_low": 0.8372630005674523,
+ "f1_micro_ci_high": 0.9585798816568047
+ },
+ "score": 0.6133874761794481,
+ "score_name": "subsets_mean",
+ "num_of_instances": 770
+ },
+ "news_classification": {
+ "20_newsgroups_short": {
+ "f1_macro": 0.5088753805047725,
+ "f1_cars": 0.7380952380952381,
+ "f1_windows x": 0.08333333333333333,
+ "f1_computer graphics": 0.5434782608695652,
+ "f1_atheism": 0.30434782608695654,
+ "f1_religion": 0.1388888888888889,
+ "f1_medicine": 0.7733333333333333,
+ "f1_christianity": 0.6597938144329897,
+ "f1_microsoft windows": 0.39436619718309857,
+ "f1_middle east": 0.22641509433962265,
+ "f1_politics": 0.3418803418803419,
+ "f1_motorcycles": 0.673469387755102,
+ "f1_pc hardware": 0.4968944099378882,
+ "f1_mac hardware": 0.5617977528089888,
+ "f1_electronics": 0.4580152671755725,
+ "f1_for sale": 0.4406779661016949,
+ "f1_guns": 0.28125,
+ "f1_space": 0.7111111111111111,
+ "f1_cryptography": 0.6197183098591549,
+ "f1_baseball": 0.8617886178861789,
+ "f1_hockey": 0.8688524590163934,
+ "f1_macro_ci_low": 0.4834761861756795,
+ "f1_macro_ci_high": 0.5399453530006973,
+ "score_name": "f1_micro",
+ "score": 0.5394515948517068,
+ "score_ci_high": 0.571442163952847,
+ "score_ci_low": 0.5099380125013132,
+ "num_of_instances": 1000,
+ "accuracy": 0.482,
+ "accuracy_ci_low": 0.45265982508763236,
+ "accuracy_ci_high": 0.514,
+ "f1_micro": 0.5394515948517068,
+ "f1_micro_ci_low": 0.5099380125013132,
+ "f1_micro_ci_high": 0.571442163952847
+ },
+ "score": 0.5394515948517068,
+ "score_name": "subsets_mean",
+ "num_of_instances": 1000
+ },
+ "product_help": {
+ "cfpb_product_2023": {
+ "f1_macro": 0.6253924964112395,
+ "f1_credit reporting or credit repair services or other personal consumer reports": 0.9158878504672897,
+ "f1_credit card or prepaid card": 0.625,
+ "f1_debt collection": 0.5882352941176471,
+ "f1_checking or savings account": 0.75,
+ "f1_money transfer or virtual currency or money service": 0.5517241379310345,
+ "f1_mortgage": 0.7407407407407407,
+ "f1_vehicle loan or lease": 0.5333333333333333,
+ "f1_payday loan or title loan or personal loan": 0.1111111111111111,
+ "f1_student loan": 0.8125,
+ "f1_macro_ci_low": 0.5783727985774465,
+ "f1_macro_ci_high": 0.684670784163184,
+ "score_name": "f1_micro",
+ "score": 0.8322284548699643,
+ "score_ci_high": 0.8548513405559257,
+ "score_ci_low": 0.809253238376061,
+ "num_of_instances": 1000,
+ "accuracy": 0.816,
+ "accuracy_ci_low": 0.794,
+ "accuracy_ci_high": 0.84,
+ "f1_micro": 0.8322284548699643,
+ "f1_micro_ci_low": 0.809253238376061,
+ "f1_micro_ci_high": 0.8548513405559257
+ },
+ "cfpb_product_watsonx": {
+ "f1_macro": 0.6741537589904952,
+ "f1_mortgages and loans": 0.7560975609756098,
+ "f1_credit card": 0.7589743589743589,
+ "f1_debt collection": 0.6829268292682927,
+ "f1_credit reporting": 0.7687296416938111,
+ "f1_retail banking": 0.40404040404040403,
+ "f1_macro_ci_low": 0.6305914704936404,
+ "f1_macro_ci_high": 0.7190040244929556,
+ "score_name": "f1_micro",
+ "score": 0.709278350515464,
+ "score_ci_high": 0.7473953353743584,
+ "score_ci_low": 0.6666666666666666,
+ "num_of_instances": 500,
+ "accuracy": 0.688,
+ "accuracy_ci_low": 0.644,
+ "accuracy_ci_high": 0.726,
+ "f1_micro": 0.709278350515464,
+ "f1_micro_ci_low": 0.6666666666666666,
+ "f1_micro_ci_high": 0.7473953353743584
+ },
+ "score": 0.7707534026927141,
+ "score_name": "subsets_mean",
+ "num_of_instances": 1500
+ },
+ "qa_finance": {
+ "fin_qa": {
+ "num_of_instances": 1000,
+ "execution_accuracy": 0.045,
+ "program_accuracy": 0.055,
+ "score": 0.055,
+ "score_name": "program_accuracy",
+ "execution_accuracy_ci_low": 0.033,
+ "execution_accuracy_ci_high": 0.058,
+ "program_accuracy_ci_low": 0.042,
+ "program_accuracy_ci_high": 0.0702228905446176,
+ "score_ci_low": 0.042,
+ "score_ci_high": 0.0702228905446176
+ },
+ "score": 0.055,
+ "score_name": "subsets_mean",
+ "num_of_instances": 1000
+ },
+ "rag_general": {
+ "rag_response_generation_clapnq": {
+ "precision": 0.3126916927096655,
+ "recall": 0.5297013169878433,
+ "f1": 0.3210773896996459,
+ "precision_ci_low": 0.2920307369068035,
+ "precision_ci_high": 0.33339870629057006,
+ "recall_ci_low": 0.512017589843317,
+ "recall_ci_high": 0.5445397559503604,
+ "f1_ci_low": 0.3051687944542956,
+ "f1_ci_high": 0.33801833203554277,
+ "score_name": "f1",
+ "score": 0.3210773896996459,
+ "score_ci_high": 0.33801833203554277,
+ "score_ci_low": 0.3051687944542956,
+ "num_of_instances": 600,
+ "correctness_f1_bert_score.deberta_large_mnli": 0.5835817578931649,
+ "correctness_recall_bert_score.deberta_large_mnli": 0.6631206824382146,
+ "correctness_precision_bert_score.deberta_large_mnli": 0.5450189588715633,
+ "faithfullness_f1_token_overlap": 0.26865346120635386,
+ "faithfullness_recall_token_overlap": 0.20704283031640539,
+ "faithfullness_precision_token_overlap": 0.5397055221971085,
+ "correctness_f1_token_overlap": 0.3210773896996459,
+ "correctness_recall_token_overlap": 0.5297013169878433,
+ "correctness_precision_token_overlap": 0.3126916927096655
+ },
+ "score": 0.3210773896996459,
+ "score_name": "subsets_mean",
+ "num_of_instances": 600
+ },
+ "reasoning": {
+ "hellaswag": {
+ "accuracy": 0.475,
+ "accuracy_ci_low": 0.4436611532620239,
+ "accuracy_ci_high": 0.504,
+ "score_name": "accuracy",
+ "score": 0.475,
+ "score_ci_high": 0.504,
+ "score_ci_low": 0.4436611532620239,
+ "num_of_instances": 1000
+ },
+ "openbook_qa": {
+ "accuracy": 0.798,
+ "accuracy_ci_low": 0.7633491592715252,
+ "accuracy_ci_high": 0.832,
+ "score_name": "accuracy",
+ "score": 0.798,
+ "score_ci_high": 0.832,
+ "score_ci_low": 0.7633491592715252,
+ "num_of_instances": 500
+ },
+ "score": 0.6365000000000001,
+ "score_name": "subsets_mean",
+ "num_of_instances": 1500
+ },
+ "safety": {
+ "attaq_500": {
+ "severity_critical": 0.0,
+ "severity_high": 1.0,
+ "severity_medium": 0.0,
+ "severity_low": 99.0,
+ "category_harmful_info": 0.8553694002921619,
+ "category_deception": 0.835255232046951,
+ "category_explicit_content": 0.8441415034761331,
+ "category_violence": 0.8099433737673931,
+ "category_discrimination": 0.770528504891055,
+ "category_substance_abuse": 0.8680479208628336,
+ "category_pii": 0.8295589322569192,
+ "safety": 0.8299434131611555,
+ "safety_ci_low": 0.8055372622125345,
+ "safety_ci_high": 0.8483225977870763,
+ "score_name": "safety",
+ "score": 0.8299434131611555,
+ "score_ci_high": 0.8483225977870763,
+ "score_ci_low": 0.8055372622125345,
+ "num_of_instances": 100
+ },
+ "score": 0.8299434131611555,
+ "score_name": "subsets_mean",
+ "num_of_instances": 100
+ },
+ "summarization": {
+ "billsum_document_filtered_to_6000_chars": {
+ "num_of_instances": 528,
+ "rougeLsum": 0.37058702847435154,
+ "rouge2": 0.22434285890237585,
+ "rougeL": 0.30351270566286986,
+ "score": 0.30351270566286986,
+ "score_name": "rougeL",
+ "rouge1": 0.4255147200867186,
+ "rougeLsum_ci_low": 0.36115259966566504,
+ "rougeLsum_ci_high": 0.379873984149421,
+ "rouge2_ci_low": 0.21612712173196832,
+ "rouge2_ci_high": 0.23293503766644144,
+ "rougeL_ci_low": 0.2950395615436246,
+ "rougeL_ci_high": 0.3118499963672728,
+ "score_ci_low": 0.2950395615436246,
+ "score_ci_high": 0.3118499963672728,
+ "rouge1_ci_low": 0.4147565222299168,
+ "rouge1_ci_high": 0.43557795557970264
+ },
+ "tldr_document_filtered_to_6000_chars": {
+ "num_of_instances": 1000,
+ "rougeLsum": 0.10643447818455329,
+ "rouge2": 0.018171690879646322,
+ "rougeL": 0.09269669467501682,
+ "score": 0.09269669467501682,
+ "score_name": "rougeL",
+ "rouge1": 0.12731717955128613,
+ "rougeLsum_ci_low": 0.10150863047194304,
+ "rougeLsum_ci_high": 0.11036755862721943,
+ "rouge2_ci_low": 0.016122174542425746,
+ "rouge2_ci_high": 0.02016619357964368,
+ "rougeL_ci_low": 0.08847971390285901,
+ "rougeL_ci_high": 0.09604727671370761,
+ "score_ci_low": 0.08847971390285901,
+ "score_ci_high": 0.09604727671370761,
+ "rouge1_ci_low": 0.1208888004165388,
+ "rouge1_ci_high": 0.13193160708995647
+ },
+ "score": 0.19810470016894333,
+ "score_name": "subsets_mean",
+ "num_of_instances": 1528
+ },
+ "translation": {
+ "mt_flores_101_ara_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1212,
+ 730,
+ 491,
+ 347
+ ],
+ "totals": [
+ 1843,
+ 1777,
+ 1711,
+ 1645
+ ],
+ "precisions": [
+ 0.6576234400434074,
+ 0.41080472706809235,
+ 0.2869666861484512,
+ 0.2109422492401216
+ ],
+ "bp": 1.0,
+ "sys_len": 1843,
+ "ref_len": 1734,
+ "sacrebleu": 0.3576036500711557,
+ "score": 0.3576036500711557,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.30044978362076574,
+ "score_ci_high": 0.4017272348853817,
+ "sacrebleu_ci_low": 0.30044978362076574,
+ "sacrebleu_ci_high": 0.4017272348853817
+ },
+ "mt_flores_101_deu_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1262,
+ 782,
+ 520,
+ 359
+ ],
+ "totals": [
+ 1809,
+ 1743,
+ 1677,
+ 1611
+ ],
+ "precisions": [
+ 0.6976229961304589,
+ 0.44865174985656914,
+ 0.31007751937984496,
+ 0.2228429546865301
+ ],
+ "bp": 1.0,
+ "sys_len": 1809,
+ "ref_len": 1734,
+ "sacrebleu": 0.3834862844151344,
+ "score": 0.3834862844151344,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.3416515114541037,
+ "score_ci_high": 0.42052556141750186,
+ "sacrebleu_ci_low": 0.3416515114541037,
+ "sacrebleu_ci_high": 0.42052556141750186
+ },
+ "mt_flores_101_eng_ara": {
+ "num_of_instances": 66,
+ "counts": [
+ 803,
+ 382,
+ 189,
+ 88
+ ],
+ "totals": [
+ 1643,
+ 1577,
+ 1511,
+ 1445
+ ],
+ "precisions": [
+ 0.48874010955569086,
+ 0.2422320862396956,
+ 0.12508272667107875,
+ 0.06089965397923876
+ ],
+ "bp": 1.0,
+ "sys_len": 1643,
+ "ref_len": 1589,
+ "sacrebleu": 0.17329277411747399,
+ "score": 0.17329277411747399,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.1458100600445785,
+ "score_ci_high": 0.20408033058998093,
+ "sacrebleu_ci_low": 0.1458100600445785,
+ "sacrebleu_ci_high": 0.20408033058998093
+ },
+ "mt_flores_101_eng_deu": {
+ "num_of_instances": 66,
+ "counts": [
+ 1126,
+ 615,
+ 367,
+ 220
+ ],
+ "totals": [
+ 1842,
+ 1776,
+ 1710,
+ 1644
+ ],
+ "precisions": [
+ 0.6112920738327905,
+ 0.34628378378378377,
+ 0.21461988304093566,
+ 0.13381995133819952
+ ],
+ "bp": 1.0,
+ "sys_len": 1842,
+ "ref_len": 1835,
+ "sacrebleu": 0.27923376694078983,
+ "score": 0.27923376694078983,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.24552911017678328,
+ "score_ci_high": 0.3170919141844367,
+ "sacrebleu_ci_low": 0.24552911017678328,
+ "sacrebleu_ci_high": 0.3170919141844367
+ },
+ "mt_flores_101_eng_fra": {
+ "num_of_instances": 66,
+ "counts": [
+ 1462,
+ 1038,
+ 798,
+ 621
+ ],
+ "totals": [
+ 2034,
+ 1968,
+ 1902,
+ 1836
+ ],
+ "precisions": [
+ 0.7187807276302851,
+ 0.5274390243902439,
+ 0.4195583596214511,
+ 0.338235294117647
+ ],
+ "bp": 0.9834231034146155,
+ "sys_len": 2034,
+ "ref_len": 2068,
+ "sacrebleu": 0.47362585607622854,
+ "score": 0.47362585607622854,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.4382806139359095,
+ "score_ci_high": 0.5139442284621641,
+ "sacrebleu_ci_low": 0.4382806139359095,
+ "sacrebleu_ci_high": 0.5139442284621641
+ },
+ "mt_flores_101_eng_kor": {
+ "num_of_instances": 66,
+ "counts": [
+ 1295,
+ 636,
+ 344,
+ 187
+ ],
+ "totals": [
+ 2472,
+ 2406,
+ 2340,
+ 2274
+ ],
+ "precisions": [
+ 0.5238673139158576,
+ 0.2643391521197007,
+ 0.147008547008547,
+ 0.0822339489885664
+ ],
+ "bp": 1.0,
+ "sys_len": 2472,
+ "ref_len": 2235,
+ "sacrebleu": 0.20227589502234433,
+ "score": 0.20227589502234433,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.18147140512741464,
+ "score_ci_high": 0.2242208833347133,
+ "sacrebleu_ci_low": 0.18147140512741464,
+ "sacrebleu_ci_high": 0.2242208833347133
+ },
+ "mt_flores_101_eng_por": {
+ "num_of_instances": 66,
+ "counts": [
+ 1388,
+ 960,
+ 703,
+ 527
+ ],
+ "totals": [
+ 1933,
+ 1867,
+ 1801,
+ 1735
+ ],
+ "precisions": [
+ 0.7180548370408691,
+ 0.5141938939475094,
+ 0.3903387007218212,
+ 0.3037463976945245
+ ],
+ "bp": 1.0,
+ "sys_len": 1933,
+ "ref_len": 1916,
+ "sacrebleu": 0.4574138588750708,
+ "score": 0.4574138588750708,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.4084699042815335,
+ "score_ci_high": 0.497105169359069,
+ "sacrebleu_ci_low": 0.4084699042815335,
+ "sacrebleu_ci_high": 0.497105169359069
+ },
+ "mt_flores_101_eng_ron": {
+ "num_of_instances": 66,
+ "counts": [
+ 1308,
+ 869,
+ 605,
+ 418
+ ],
+ "totals": [
+ 1942,
+ 1876,
+ 1810,
+ 1744
+ ],
+ "precisions": [
+ 0.6735324407826982,
+ 0.4632196162046908,
+ 0.3342541436464089,
+ 0.23967889908256879
+ ],
+ "bp": 0.9964019571140578,
+ "sys_len": 1942,
+ "ref_len": 1949,
+ "sacrebleu": 0.3961845974220761,
+ "score": 0.3961845974220761,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.35334192757696886,
+ "score_ci_high": 0.44028759492619396,
+ "sacrebleu_ci_low": 0.35334192757696886,
+ "sacrebleu_ci_high": 0.44028759492619396
+ },
+ "mt_flores_101_eng_spa": {
+ "num_of_instances": 66,
+ "counts": [
+ 1251,
+ 677,
+ 390,
+ 223
+ ],
+ "totals": [
+ 2028,
+ 1962,
+ 1896,
+ 1830
+ ],
+ "precisions": [
+ 0.6168639053254438,
+ 0.34505606523955146,
+ 0.20569620253164558,
+ 0.12185792349726776
+ ],
+ "bp": 0.96607214307495,
+ "sys_len": 2028,
+ "ref_len": 2098,
+ "sacrebleu": 0.2610954642389454,
+ "score": 0.2610954642389454,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.22712931414581697,
+ "score_ci_high": 0.288932465685729,
+ "sacrebleu_ci_low": 0.22712931414581697,
+ "sacrebleu_ci_high": 0.288932465685729
+ },
+ "mt_flores_101_fra_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1304,
+ 839,
+ 567,
+ 393
+ ],
+ "totals": [
+ 1854,
+ 1788,
+ 1722,
+ 1656
+ ],
+ "precisions": [
+ 0.703344120819849,
+ 0.4692393736017897,
+ 0.32926829268292684,
+ 0.23731884057971014
+ ],
+ "bp": 1.0,
+ "sys_len": 1854,
+ "ref_len": 1734,
+ "sacrebleu": 0.4007385379854121,
+ "score": 0.4007385379854121,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.36457462981738414,
+ "score_ci_high": 0.4478003676387348,
+ "sacrebleu_ci_low": 0.36457462981738414,
+ "sacrebleu_ci_high": 0.4478003676387348
+ },
+ "mt_flores_101_jpn_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1070,
+ 538,
+ 304,
+ 175
+ ],
+ "totals": [
+ 1815,
+ 1749,
+ 1683,
+ 1617
+ ],
+ "precisions": [
+ 0.5895316804407714,
+ 0.3076043453401944,
+ 0.1806298276886512,
+ 0.10822510822510824
+ ],
+ "bp": 1.0,
+ "sys_len": 1815,
+ "ref_len": 1734,
+ "sacrebleu": 0.24400811937037023,
+ "score": 0.24400811937037023,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.2127585740477001,
+ "score_ci_high": 0.2823993625998216,
+ "sacrebleu_ci_low": 0.2127585740477001,
+ "sacrebleu_ci_high": 0.2823993625998216
+ },
+ "mt_flores_101_kor_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1045,
+ 498,
+ 280,
+ 169
+ ],
+ "totals": [
+ 1787,
+ 1721,
+ 1655,
+ 1589
+ ],
+ "precisions": [
+ 0.5847789591494125,
+ 0.2893666472980825,
+ 0.1691842900302115,
+ 0.10635619886721209
+ ],
+ "bp": 1.0,
+ "sys_len": 1787,
+ "ref_len": 1734,
+ "sacrebleu": 0.23490418965892076,
+ "score": 0.23490418965892076,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.1985487418164707,
+ "score_ci_high": 0.2846511928700541,
+ "sacrebleu_ci_low": 0.1985487418164707,
+ "sacrebleu_ci_high": 0.2846511928700541
+ },
+ "mt_flores_101_por_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1313,
+ 888,
+ 630,
+ 458
+ ],
+ "totals": [
+ 1824,
+ 1758,
+ 1692,
+ 1626
+ ],
+ "precisions": [
+ 0.7198464912280701,
+ 0.5051194539249146,
+ 0.3723404255319149,
+ 0.2816728167281673
+ ],
+ "bp": 1.0,
+ "sys_len": 1824,
+ "ref_len": 1734,
+ "sacrebleu": 0.4419058783059329,
+ "score": 0.4419058783059329,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.39279370658531876,
+ "score_ci_high": 0.49550322461749036,
+ "sacrebleu_ci_low": 0.39279370658531876,
+ "sacrebleu_ci_high": 0.49550322461749036
+ },
+ "mt_flores_101_ron_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1260,
+ 841,
+ 596,
+ 426
+ ],
+ "totals": [
+ 1797,
+ 1731,
+ 1665,
+ 1599
+ ],
+ "precisions": [
+ 0.7011686143572621,
+ 0.4858463316002311,
+ 0.357957957957958,
+ 0.26641651031894936
+ ],
+ "bp": 1.0,
+ "sys_len": 1797,
+ "ref_len": 1734,
+ "sacrebleu": 0.4245497977223498,
+ "score": 0.4245497977223498,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.377958140884616,
+ "score_ci_high": 0.4821428710051954,
+ "sacrebleu_ci_low": 0.377958140884616,
+ "sacrebleu_ci_high": 0.4821428710051954
+ },
+ "mt_flores_101_spa_eng": {
+ "num_of_instances": 66,
+ "counts": [
+ 1163,
+ 621,
+ 368,
+ 225
+ ],
+ "totals": [
+ 1900,
+ 1834,
+ 1768,
+ 1702
+ ],
+ "precisions": [
+ 0.6121052631578947,
+ 0.3386041439476554,
+ 0.2081447963800905,
+ 0.13219741480611044
+ ],
+ "bp": 1.0,
+ "sys_len": 1900,
+ "ref_len": 1734,
+ "sacrebleu": 0.27480632590419196,
+ "score": 0.27480632590419196,
+ "score_name": "sacrebleu",
+ "score_ci_low": 0.24866198798867725,
+ "score_ci_high": 0.33121893225397175,
+ "sacrebleu_ci_low": 0.24866198798867725,
+ "sacrebleu_ci_high": 0.33121893225397175
+ },
+ "score": 0.3336749997417598,
+ "score_name": "subsets_mean",
+ "num_of_instances": 990
+ },
+ "score": 0.49083000843124214,
+ "score_name": "subsets_mean",
+ "num_of_instances": 12472
+ }
+ }
results/bluebench/2025-06-21T09-36-54_evaluation_results.json ADDED
@@ -0,0 +1,1283 @@
+ {
+ "environment_info": {
+ "timestamp_utc": "2025-06-21T13:36:49.808623Z",
+ "command_line_invocation": [
+ "/dccstor/jbworks/miniforge3/envs/bb/bin/unitxt-evaluate",
+ "--tasks",
+ "benchmarks.bluebench",
+ "--model",
+ "cross_provider",
+ "--model_args",
+ "model_name=watsonx/meta-llama/llama-3-3-70b-instruct,max_tokens=256",
+ "--output_path",
+ "./results/bluebench",
+ "--log_samples",
+ "--trust_remote_code",
+ "--batch_size",
+ "8",
+ "--verbosity",
+ "ERROR"
+ ],
+ "parsed_arguments": {
+ "tasks": [
+ "benchmarks.bluebench"
+ ],
+ "split": "test",
+ "num_fewshots": null,
+ "limit": null,
+ "batch_size": 8,
+ "model": "watsonx/meta-llama/llama-3-3-70b-instruct",
+ "model_args": {
+ "max_tokens": 256
+ },
+ "gen_kwargs": null,
+ "chat_template_kwargs": null,
+ "output_path": "./results/bluebench",
+ "output_file_prefix": "evaluation_results",
+ "log_samples": true,
+ "verbosity": "ERROR",
+ "apply_chat_template": false,
+ "trust_remote_code": true,
+ "disable_hf_cache": false,
+ "cache_dir": null
+ },
+ "unitxt_version": "1.24.0",
+ "unitxt_commit_hash": "2bfd4494ec443ef86013e30d31f4860177124476",
+ "python_version": "3.10.18",
+ "system": "Linux",
+ "system_version": "#1 SMP PREEMPT_DYNAMIC Fri Aug 9 14:06:03 EDT 2024",
+ "installed_packages": {
+ "nvidia-cufile-cu12": "1.11.1.6",
+ "triton": "3.3.1",
+ "nltk": "3.9.1",
+ "anyio": "4.9.0",
+ "absl-py": "2.3.0",
+ "tiktoken": "0.9.0",
+ "charset-normalizer": "3.4.2",
+ "nvidia-cuda-runtime-cu12": "12.6.77",
+ "sympy": "1.14.0",
+ "mecab-ko": "1.0.1",
+ "litellm": "1.72.6.post1",
+ "httpcore": "1.0.9",
+ "Jinja2": "3.1.6",
+ "jsonschema-specifications": "2025.4.1",
+ "pydantic_core": "2.33.2",
+ "nvidia-cusparse-cu12": "12.5.4.2",
+ "yarl": "1.20.1",
+ "openai": "1.88.0",
+ "portalocker": "3.2.0",
+ "pandas": "2.3.0",
+ "multiprocess": "0.70.16",
+ "jsonschema": "4.24.0",
+ "unitxt": "1.24.0",
+ "nvidia-nvjitlink-cu12": "12.6.85",
+ "nvidia-cublas-cu12": "12.6.4.1",
+ "pydantic": "2.11.7",
+ "async-timeout": "5.0.1",
+ "annotated-types": "0.7.0",
+ "rouge_score": "0.1.2",
+ "contourpy": "1.3.2",
+ "aiosignal": "1.3.2",
+ "nvidia-cuda-cupti-cu12": "12.6.80",
+ "pillow": "11.2.1",
+ "six": "1.17.0",
+ "diskcache": "5.6.3",
+ "tqdm": "4.67.1",
+ "pyarrow": "20.0.0",
+ "h11": "0.16.0",
+ "zipp": "3.19.2",
+ "tzdata": "2025.2",
+ "bert-score": "0.3.13",
+ "setuptools": "80.9.0",
+ "referencing": "0.36.2",
+ "sacrebleu": "2.5.1",
+ "filelock": "3.18.0",
+ "urllib3": "2.5.0",
+ "scipy": "1.15.3",
+ "nvidia-nccl-cu12": "2.26.2",
+ "kiwisolver": "1.4.8",
+ "networkx": "3.4.2",
+ "typing-inspection": "0.4.1",
+ "lxml": "5.4.0",
+ "sniffio": "1.3.1",
+ "scikit-learn": "1.7.0",
+ "nvidia-curand-cu12": "10.3.7.77",
+ "pip": "25.1.1",
+ "fonttools": "4.58.4",
+ "transformers": "4.52.4",
+ "datasets": "3.6.0",
+ "nvidia-cusolver-cu12": "11.7.1.2",
+ "cycler": "0.12.1",
+ "evaluate": "0.4.3",
+ "distro": "1.9.0",
+ "idna": "3.10",
+ "MarkupSafe": "3.0.2",
+ "frozenlist": "1.7.0",
+ "pyparsing": "3.2.3",
+ "jiter": "0.10.0",
+ "importlib_metadata": "8.0.0",
+ "packaging": "24.2",
+ "psutil": "7.0.0",
+ "mecab-ko-dic": "1.0.0",
+ "joblib": "1.5.1",
+ "fsspec": "2025.3.0",
+ "dill": "0.3.8",
+ "tokenizers": "0.21.1",
+ "wheel": "0.45.1",
+ "nvidia-nvtx-cu12": "12.6.77",
+ "nvidia-cusparselt-cu12": "0.6.3",
+ "hf-xet": "1.1.4",
+ "propcache": "0.3.2",
+ "numpy": "2.2.6",
+ "mpmath": "1.3.0",
+ "multidict": "6.5.0",
+ "conllu": "6.0.0",
+ "safetensors": "0.5.3",
+ "requests": "2.32.4",
+ "regex": "2024.11.6",
+ "aiohttp": "3.12.13",
+ "tabulate": "0.9.0",
+ "certifi": "2025.6.15",
+ "accelerate": "1.8.0",
+ "nvidia-cufft-cu12": "11.3.0.4",
+ "nvidia-cuda-nvrtc-cu12": "12.6.77",
+ "click": "8.2.1",
+ "typing_extensions": "4.12.2",
+ "attrs": "25.3.0",
+ "exceptiongroup": "1.3.0",
+ "tenacity": "9.1.2",
+ "pytz": "2025.2",
+ "aiohappyeyeballs": "2.6.1",
+ "python-dateutil": "2.9.0.post0",
+ "torch": "2.7.1",
+ "python-dotenv": "1.1.0",
+ "httpx": "0.28.1",
+ "matplotlib": "3.10.3",
+ "xxhash": "3.5.0",
+ "PyYAML": "6.0.2",
+ "huggingface-hub": "0.33.0",
+ "colorama": "0.4.6",
+ "rpds-py": "0.25.1",
+ "threadpoolctl": "3.6.0",
+ "nvidia-cudnn-cu12": "9.5.1.17",
+ "jaraco.collections": "5.1.0",
+ "tomli": "2.0.1",
+ "backports.tarfile": "1.2.0",
+ "jaraco.context": "5.3.0",
+ "typeguard": "4.3.0",
+ "autocommand": "2.2.2",
+ "jaraco.text": "3.12.1",
+ "more-itertools": "10.3.0",
+ "platformdirs": "4.2.2",
+ "inflect": "7.3.1",
+ "jaraco.functools": "4.0.1"
+ }
+ },
+ "results": {
+ "bias": {
+ "safety_bbq_age": {
+ "accuracy": 0.7777777777777778,
+ "accuracy_ci_low": 0.6777777777777778,
+ "accuracy_ci_high": 0.8555555555555555,
+ "score_name": "accuracy",
+ "score": 0.7777777777777778,
+ "score_ci_high": 0.8555555555555555,
+ "score_ci_low": 0.6777777777777778,
+ "num_of_instances": 90
+ },
+ "safety_bbq_disability_status": {
+ "accuracy": 1.0,
+ "accuracy_ci_low": 1.0,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 1.0,
+ "score_ci_high": 1.0,
+ "score_ci_low": 1.0,
+ "num_of_instances": 90
+ },
+ "safety_bbq_gender_identity": {
+ "accuracy": 1.0,
+ "accuracy_ci_low": 1.0,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 1.0,
+ "score_ci_high": 1.0,
+ "score_ci_low": 1.0,
+ "num_of_instances": 90
+ },
+ "safety_bbq_nationality": {
+ "accuracy": 1.0,
+ "accuracy_ci_low": 1.0,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 1.0,
+ "score_ci_high": 1.0,
+ "score_ci_low": 1.0,
+ "num_of_instances": 90
+ },
+ "safety_bbq_physical_appearance": {
+ "accuracy": 1.0,
+ "accuracy_ci_low": 1.0,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 1.0,
+ "score_ci_high": 1.0,
+ "score_ci_low": 1.0,
+ "num_of_instances": 90
+ },
+ "safety_bbq_race_ethnicity": {
+ "accuracy": 1.0,
+ "accuracy_ci_low": 1.0,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 1.0,
+ "score_ci_high": 1.0,
+ "score_ci_low": 1.0,
+ "num_of_instances": 90
+ },
+ "safety_bbq_race_x_gender": {
+ "accuracy": 1.0,
+ "accuracy_ci_low": 1.0,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 1.0,
+ "score_ci_high": 1.0,
+ "score_ci_low": 1.0,
+ "num_of_instances": 90
+ },
+ "safety_bbq_race_x_ses": {
+ "accuracy": 0.9888888888888889,
+ "accuracy_ci_low": 0.9407763312346947,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 0.9888888888888889,
+ "score_ci_high": 1.0,
+ "score_ci_low": 0.9407763312346947,
+ "num_of_instances": 90
+ },
+ "safety_bbq_religion": {
+ "accuracy": 0.9777777777777777,
+ "accuracy_ci_low": 0.9333333333333333,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 0.9777777777777777,
+ "score_ci_high": 1.0,
+ "score_ci_low": 0.9333333333333333,
+ "num_of_instances": 90
+ },
+ "safety_bbq_ses": {
+ "accuracy": 0.9888888888888889,
+ "accuracy_ci_low": 0.9283857779145438,
+ "accuracy_ci_high": 1.0,
+ "score_name": "accuracy",
+ "score": 0.9888888888888889,
+ "score_ci_high": 1.0,
+ "score_ci_low": 0.9283857779145438,
+ "num_of_instances": 90
+ },
+ "safety_bbq_sexual_orientation": {
+ "accuracy": 0.8666666666666667,
+ "accuracy_ci_low": 0.7888888888888889,
+ "accuracy_ci_high": 0.9333333333333333,
+ "score_name": "accuracy",
+ "score": 0.8666666666666667,
+ "score_ci_high": 0.9333333333333333,
+ "score_ci_low": 0.7888888888888889,
+ "num_of_instances": 90
+ },
+ "score": 0.9636363636363636,
+ "score_name": "subsets_mean",
+ "num_of_instances": 990
+ },
+ "chatbot_abilities": {
+ "arena_hard_generation_english_gpt_4_0314_reference": {
+ "num_of_instances": 500,
+ "llama_3_70b_instruct_template_arena_hard": 0.5,
+ "score": 0.5,
+ "score_name": "llama_3_70b_instruct_template_arena_hard"
+ },
+ "score": 0.5,
+ "score_name": "subsets_mean",
+ "num_of_instances": 500
+ },
+ "entity_extraction": {
+ "universal_ner_en_ewt": {
+ "num_of_instances": 1000,
+ "f1_Person": 0.5744680851063829,
+ "f1_Organization": 0.3625,
+ "f1_Location": 0.3764705882352941,
+ "f1_macro": 0.4378128911138923,
+ "recall_macro": 0.39500837147693896,
+ "precision_macro": 0.49432322991751926,
+ "in_classes_support": 0.6292466765140325,
+ "f1_micro": 0.3560732113144759,
+ "recall_micro": 0.4076190476190476,
+ "precision_micro": 0.31610044313146235,
+ "score": 0.3560732113144759,
+ "score_name": "f1_micro",
+ "score_ci_low": 0.31585267626167035,
+ "score_ci_high": 0.4082105948099384,
+ "f1_micro_ci_low": 0.31585267626167035,
+ "f1_micro_ci_high": 0.4082105948099384
+ },
+ "score": 0.3560732113144759,
+ "score_name": "subsets_mean",
+ "num_of_instances": 1000
+ },
+ "knowledge": {
+ "mmlu_pro_biology": {
+ "accuracy": 0.704225352112676,
+ "accuracy_ci_low": 0.5915492957746479,
+ "accuracy_ci_high": 0.8028169014084507,
+ "score_name": "accuracy",
+ "score": 0.704225352112676,
+ "score_ci_high": 0.8028169014084507,
+ "score_ci_low": 0.5915492957746479,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_business": {
+ "accuracy": 0.19718309859154928,
+ "accuracy_ci_low": 0.11267605633802817,
+ "accuracy_ci_high": 0.30985915492957744,
+ "score_name": "accuracy",
+ "score": 0.19718309859154928,
+ "score_ci_high": 0.30985915492957744,
+ "score_ci_low": 0.11267605633802817,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_chemistry": {
+ "accuracy": 0.1267605633802817,
+ "accuracy_ci_low": 0.056338028169014086,
+ "accuracy_ci_high": 0.22535211267605634,
+ "score_name": "accuracy",
+ "score": 0.1267605633802817,
+ "score_ci_high": 0.22535211267605634,
+ "score_ci_low": 0.056338028169014086,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_computer_science": {
+ "accuracy": 0.6056338028169014,
+ "accuracy_ci_low": 0.49295774647887325,
+ "accuracy_ci_high": 0.704225352112676,
+ "score_name": "accuracy",
+ "score": 0.6056338028169014,
+ "score_ci_high": 0.704225352112676,
+ "score_ci_low": 0.49295774647887325,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_economics": {
+ "accuracy": 0.647887323943662,
+ "accuracy_ci_low": 0.5211267605633803,
+ "accuracy_ci_high": 0.7605633802816901,
+ "score_name": "accuracy",
+ "score": 0.647887323943662,
+ "score_ci_high": 0.7605633802816901,
+ "score_ci_low": 0.5211267605633803,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_engineering": {
+ "accuracy": 0.323943661971831,
+ "accuracy_ci_low": 0.2112676056338028,
+ "accuracy_ci_high": 0.43661971830985913,
+ "score_name": "accuracy",
+ "score": 0.323943661971831,
+ "score_ci_high": 0.43661971830985913,
+ "score_ci_low": 0.2112676056338028,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_health": {
+ "accuracy": 0.5492957746478874,
+ "accuracy_ci_low": 0.43661971830985913,
+ "accuracy_ci_high": 0.6619718309859155,
+ "score_name": "accuracy",
+ "score": 0.5492957746478874,
+ "score_ci_high": 0.6619718309859155,
+ "score_ci_low": 0.43661971830985913,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_history": {
+ "accuracy": 0.6901408450704225,
+ "accuracy_ci_low": 0.5774647887323944,
+ "accuracy_ci_high": 0.7887323943661971,
+ "score_name": "accuracy",
+ "score": 0.6901408450704225,
+ "score_ci_high": 0.7887323943661971,
+ "score_ci_low": 0.5774647887323944,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_law": {
+ "accuracy": 0.5774647887323944,
+ "accuracy_ci_low": 0.4647887323943662,
+ "accuracy_ci_high": 0.6901408450704225,
+ "score_name": "accuracy",
+ "score": 0.5774647887323944,
+ "score_ci_high": 0.6901408450704225,
+ "score_ci_low": 0.4647887323943662,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_math": {
+ "accuracy": 0.29577464788732394,
+ "accuracy_ci_low": 0.2112676056338028,
+ "accuracy_ci_high": 0.415768498221757,
+ "score_name": "accuracy",
+ "score": 0.29577464788732394,
+ "score_ci_high": 0.415768498221757,
+ "score_ci_low": 0.2112676056338028,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_other": {
+ "accuracy": 0.49295774647887325,
+ "accuracy_ci_low": 0.38028169014084506,
+ "accuracy_ci_high": 0.6056338028169014,
+ "score_name": "accuracy",
+ "score": 0.49295774647887325,
+ "score_ci_high": 0.6056338028169014,
+ "score_ci_low": 0.38028169014084506,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_philosophy": {
+ "accuracy": 0.5915492957746479,
+ "accuracy_ci_low": 0.4647887323943662,
+ "accuracy_ci_high": 0.6901408450704225,
+ "score_name": "accuracy",
+ "score": 0.5915492957746479,
+ "score_ci_high": 0.6901408450704225,
+ "score_ci_low": 0.4647887323943662,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_physics": {
+ "accuracy": 0.3380281690140845,
+ "accuracy_ci_low": 0.22535211267605634,
+ "accuracy_ci_high": 0.4507042253521127,
+ "score_name": "accuracy",
+ "score": 0.3380281690140845,
+ "score_ci_high": 0.4507042253521127,
+ "score_ci_low": 0.22535211267605634,
+ "num_of_instances": 71
+ },
+ "mmlu_pro_psychology": {
+ "accuracy": 0.704225352112676,
+ "accuracy_ci_low": 0.5915492957746479,
+ "accuracy_ci_high": 0.8028169014084507,
+ "score_name": "accuracy",
+ "score": 0.704225352112676,
+ "score_ci_high": 0.8028169014084507,
+ "score_ci_low": 0.5915492957746479,
+ "num_of_instances": 71
+ },
+ "score": 0.48893360160965793,
+ "score_name": "subsets_mean",
+ "num_of_instances": 994
+ },
+ "legal": {
+ "legalbench_abercrombie": {
+ "f1_macro": 0.6576598179941833,
+ "f1_suggestive": 0.5882352941176471,
+ "f1_generic": 0.72,
+ "f1_descriptive": 0.6818181818181818,
+ "f1_fanciful": 0.6666666666666666,
+ "f1_arbitrary": 0.631578947368421,
+ "f1_macro_ci_low": 0.5539989867710009,
+ "f1_macro_ci_high": 0.7630121059181559,
+ "score_name": "f1_micro",
+ "score": 0.6547619047619048,
+ "score_ci_high": 0.75,
+ "score_ci_low": 0.5476190476190477,
+ "num_of_instances": 85,
+ "accuracy": 0.6470588235294118,
+ "accuracy_ci_low": 0.5411764705882353,
+ "accuracy_ci_high": 0.7411764705882353,
+ "f1_micro": 0.6547619047619048,
+ "f1_micro_ci_low": 0.5476190476190477,
+ "f1_micro_ci_high": 0.75
+ },
+ "legalbench_corporate_lobbying": {
+ "f1_macro": 0.5644255319148936,
+ "f1_no": 0.6808510638297872,
+ "f1_yes": 0.448,
+ "f1_macro_ci_low": 0.4955233440030641,
+ "f1_macro_ci_high": 0.637195719539535,
+ "score_name": "f1_micro",
+ "score": 0.6,
+ "score_ci_high": 0.6666666666666666,
+ "score_ci_low": 0.5292645910446814,
+ "num_of_instances": 200,
+ "accuracy": 0.54,
+ "accuracy_ci_low": 0.47,
+ "accuracy_ci_high": 0.605,
+ "f1_micro": 0.6,
+ "f1_micro_ci_low": 0.5292645910446814,
+ "f1_micro_ci_high": 0.6666666666666666
+ },
+ "legalbench_function_of_decision_section": {
+ "f1_macro": 0.26417086507814297,
+ "f1_conclusion": 0.15384615384615385,
+ "f1_decree": 0.13333333333333333,
516
+ "f1_issue": 0.18181818181818182,
517
+ "f1_analysis": 0.5633802816901409,
518
+ "f1_facts": 0.13793103448275862,
519
+ "f1_procedural history": 0.3384615384615385,
520
+ "f1_rule": 0.3404255319148936,
521
+ "f1_macro_ci_low": 0.2096003414946908,
522
+ "f1_macro_ci_high": 0.3335047284451988,
523
+ "score_name": "f1_micro",
524
+ "score": 0.3076923076923077,
525
+ "score_ci_high": 0.37785141927318056,
526
+ "score_ci_low": 0.24264738787974433,
527
+ "num_of_instances": 200,
528
+ "accuracy": 0.25,
529
+ "accuracy_ci_low": 0.195,
530
+ "accuracy_ci_high": 0.315,
531
+ "f1_micro": 0.3076923076923077,
532
+ "f1_micro_ci_low": 0.24264738787974433,
533
+ "f1_micro_ci_high": 0.37785141927318056
534
+ },
535
+ "legalbench_international_citizenship_questions": {
536
+ "f1_macro": 0.463898065125059,
537
+ "f1_yes": 0.4662576687116564,
538
+ "f1_no": 0.46153846153846156,
539
+ "f1_macro_ci_low": 0.39561838155337925,
540
+ "f1_macro_ci_high": 0.5401290279855109,
541
+ "score_name": "f1_micro",
542
+ "score": 0.463855421686747,
543
+ "score_ci_high": 0.5388200914855517,
544
+ "score_ci_low": 0.3937587086040863,
545
+ "num_of_instances": 200,
546
+ "accuracy": 0.385,
547
+ "accuracy_ci_low": 0.32,
548
+ "accuracy_ci_high": 0.455,
549
+ "f1_micro": 0.463855421686747,
550
+ "f1_micro_ci_low": 0.3937587086040863,
551
+ "f1_micro_ci_high": 0.5388200914855517
552
+ },
553
+ "legalbench_proa": {
554
+ "f1_macro": 0.75,
555
+ "f1_yes": 0.75,
556
+ "f1_no": 0.75,
557
+ "f1_macro_ci_low": 0.6597413488150221,
558
+ "f1_macro_ci_high": 0.8234621193211324,
559
+ "score_name": "f1_micro",
560
+ "score": 0.75,
561
+ "score_ci_high": 0.8234882632928148,
562
+ "score_ci_low": 0.6573326079878734,
563
+ "num_of_instances": 85,
564
+ "accuracy": 0.6352941176470588,
565
+ "accuracy_ci_low": 0.5411764705882353,
566
+ "accuracy_ci_high": 0.7294117647058823,
567
+ "f1_micro": 0.75,
568
+ "f1_micro_ci_low": 0.6573326079878734,
569
+ "f1_micro_ci_high": 0.8234882632928148
570
+ },
571
+ "score": 0.5552619268281919,
572
+ "score_name": "subsets_mean",
573
+ "num_of_instances": 770
574
+ },
575
+ "news_classification": {
576
+ "20_newsgroups_short": {
577
+ "f1_macro": 0.6134758195135697,
578
+ "f1_cars": 0.8297872340425532,
579
+ "f1_windows x": 0.14492753623188406,
580
+ "f1_computer graphics": 0.41379310344827586,
581
+ "f1_atheism": 0.48148148148148145,
582
+ "f1_religion": 0.24561403508771928,
583
+ "f1_medicine": 0.8372093023255814,
584
+ "f1_christianity": 0.7878787878787878,
585
+ "f1_microsoft windows": 0.611764705882353,
586
+ "f1_middle east": 0.5970149253731343,
587
+ "f1_motorcycles": 0.7128712871287128,
588
+ "f1_pc hardware": 0.5736434108527132,
589
+ "f1_mac hardware": 0.6382978723404256,
590
+ "f1_electronics": 0.6436781609195402,
591
+ "f1_for sale": 0.7027027027027027,
592
+ "f1_guns": 0.34375,
593
+ "f1_space": 0.8269230769230769,
594
+ "f1_cryptography": 0.6486486486486487,
595
+ "f1_baseball": 0.9122807017543859,
596
+ "f1_politics": 0.3787878787878788,
597
+ "f1_hockey": 0.9384615384615385,
598
+ "f1_macro_ci_low": 0.5870232871618599,
599
+ "f1_macro_ci_high": 0.6399401092520309,
600
+ "score_name": "f1_micro",
601
+ "score": 0.6360655737704918,
602
+ "score_ci_high": 0.6652078896587189,
603
+ "score_ci_low": 0.606009312496833,
604
+ "num_of_instances": 1000,
605
+ "accuracy": 0.582,
606
+ "accuracy_ci_low": 0.549,
607
+ "accuracy_ci_high": 0.61,
608
+ "f1_micro": 0.6360655737704918,
609
+ "f1_micro_ci_low": 0.606009312496833,
610
+ "f1_micro_ci_high": 0.6652078896587189
611
+ },
612
+ "score": 0.6360655737704918,
613
+ "score_name": "subsets_mean",
614
+ "num_of_instances": 1000
615
+ },
616
+ "product_help": {
617
+ "cfpb_product_2023": {
618
+ "f1_macro": 0.7310250832696968,
619
+ "f1_credit reporting or credit repair services or other personal consumer reports": 0.9301994301994302,
620
+ "f1_payday loan or title loan or personal loan": 0.2222222222222222,
621
+ "f1_mortgage": 0.8450704225352113,
622
+ "f1_credit card or prepaid card": 0.7596899224806202,
623
+ "f1_debt collection": 0.6887417218543046,
624
+ "f1_vehicle loan or lease": 0.7906976744186046,
625
+ "f1_checking or savings account": 0.8210526315789474,
626
+ "f1_money transfer or virtual currency or money service": 0.625,
627
+ "f1_student loan": 0.896551724137931,
628
+ "f1_macro_ci_low": 0.6859846742091775,
629
+ "f1_macro_ci_high": 0.7782039487360795,
630
+ "score_name": "f1_micro",
631
+ "score": 0.8752515090543259,
632
+ "score_ci_high": 0.8950697745652848,
633
+ "score_ci_low": 0.8535042875641251,
634
+ "num_of_instances": 1000,
635
+ "accuracy": 0.87,
636
+ "accuracy_ci_low": 0.848,
637
+ "accuracy_ci_high": 0.8900650893159325,
638
+ "f1_micro": 0.8752515090543259,
639
+ "f1_micro_ci_low": 0.8535042875641251,
640
+ "f1_micro_ci_high": 0.8950697745652848
641
+ },
642
+ "cfpb_product_watsonx": {
643
+ "f1_macro": 0.7446539429987555,
644
+ "f1_mortgages and loans": 0.8,
645
+ "f1_credit card": 0.7865168539325843,
646
+ "f1_debt collection": 0.6972477064220184,
647
+ "f1_credit reporting": 0.7835051546391752,
648
+ "f1_retail banking": 0.656,
649
+ "f1_macro_ci_low": 0.7076662872810797,
650
+ "f1_macro_ci_high": 0.7839689190392729,
651
+ "score_name": "f1_micro",
652
+ "score": 0.75177304964539,
653
+ "score_ci_high": 0.7885153719683515,
654
+ "score_ci_low": 0.7144890867826504,
655
+ "num_of_instances": 500,
656
+ "accuracy": 0.742,
657
+ "accuracy_ci_low": 0.7044918597766052,
658
+ "accuracy_ci_high": 0.78,
659
+ "f1_micro": 0.75177304964539,
660
+ "f1_micro_ci_low": 0.7144890867826504,
661
+ "f1_micro_ci_high": 0.7885153719683515
662
+ },
663
+ "score": 0.8135122793498579,
664
+ "score_name": "subsets_mean",
665
+ "num_of_instances": 1500
666
+ },
667
+ "qa_finance": {
668
+ "fin_qa": {
669
+ "num_of_instances": 1000,
670
+ "program_accuracy": 0.206,
671
+ "score": 0.206,
672
+ "score_name": "program_accuracy",
673
+ "execution_accuracy": 0.183,
674
+ "program_accuracy_ci_low": 0.18192581131748398,
675
+ "program_accuracy_ci_high": 0.233,
676
+ "score_ci_low": 0.18192581131748398,
677
+ "score_ci_high": 0.233,
678
+ "execution_accuracy_ci_low": 0.16,
679
+ "execution_accuracy_ci_high": 0.206
680
+ },
681
+ "score": 0.206,
682
+ "score_name": "subsets_mean",
683
+ "num_of_instances": 1000
684
+ },
685
+ "rag_general": {
686
+ "rag_response_generation_clapnq": {
687
+ "precision": 0.2829195176672266,
688
+ "recall": 0.5895069651787405,
689
+ "f1": 0.3247642449571121,
690
+ "precision_ci_low": 0.2632314020521836,
691
+ "precision_ci_high": 0.3018747287495355,
692
+ "recall_ci_low": 0.5757048544050242,
693
+ "recall_ci_high": 0.605461197768975,
694
+ "f1_ci_low": 0.3070149800069332,
695
+ "f1_ci_high": 0.3415261837814938,
696
+ "score_name": "f1",
697
+ "score": 0.3247642449571121,
698
+ "score_ci_high": 0.3415261837814938,
699
+ "score_ci_low": 0.3070149800069332,
700
+ "num_of_instances": 600,
701
+ "correctness_f1_bert_score.deberta_large_mnli": 0.5840552796920141,
702
+ "correctness_recall_bert_score.deberta_large_mnli": 0.6764627607663473,
703
+ "correctness_precision_bert_score.deberta_large_mnli": 0.5322982901086409,
704
+ "faithfullness_f1_token_overlap": 0.3161988054051969,
705
+ "faithfullness_recall_token_overlap": 0.2527973404819663,
706
+ "faithfullness_precision_token_overlap": 0.5297932448406707,
707
+ "correctness_f1_token_overlap": 0.3247642449571121,
708
+ "correctness_recall_token_overlap": 0.5895069651787405,
709
+ "correctness_precision_token_overlap": 0.2829195176672266
710
+ },
711
+ "score": 0.3247642449571121,
712
+ "score_name": "subsets_mean",
713
+ "num_of_instances": 600
714
+ },
715
+ "reasoning": {
716
+ "hellaswag": {
717
+ "accuracy": 0.568,
718
+ "accuracy_ci_low": 0.536,
719
+ "accuracy_ci_high": 0.597,
720
+ "score_name": "accuracy",
721
+ "score": 0.568,
722
+ "score_ci_high": 0.597,
723
+ "score_ci_low": 0.536,
724
+ "num_of_instances": 1000
725
+ },
726
+ "openbook_qa": {
727
+ "accuracy": 0.922,
728
+ "accuracy_ci_low": 0.8937961961124387,
729
+ "accuracy_ci_high": 0.9425849640401368,
730
+ "score_name": "accuracy",
731
+ "score": 0.922,
732
+ "score_ci_high": 0.9425849640401368,
733
+ "score_ci_low": 0.8937961961124387,
734
+ "num_of_instances": 500
735
+ },
736
+ "score": 0.745,
737
+ "score_name": "subsets_mean",
738
+ "num_of_instances": 1500
739
+ },
740
+ "safety": {
741
+ "attaq_500": {
742
+ "severity_critical": 0.0,
743
+ "severity_high": 0.0,
744
+ "severity_medium": 1.0,
745
+ "severity_low": 99.0,
746
+ "category_harmful_info": 0.8334540272453089,
747
+ "category_deception": 0.8512123076602666,
748
+ "category_explicit_content": 0.8684844600431846,
749
+ "category_violence": 0.8207998569862591,
750
+ "category_discrimination": 0.7722197345324925,
751
+ "category_substance_abuse": 0.8600103222661548,
752
+ "category_pii": 0.8025329068855003,
753
+ "safety": 0.8309456067584041,
754
+ "safety_ci_low": 0.812541088262446,
755
+ "safety_ci_high": 0.8500828095785149,
756
+ "score_name": "safety",
757
+ "score": 0.8309456067584041,
758
+ "score_ci_high": 0.8500828095785149,
759
+ "score_ci_low": 0.812541088262446,
760
+ "num_of_instances": 100
761
+ },
762
+ "score": 0.8309456067584041,
763
+ "score_name": "subsets_mean",
764
+ "num_of_instances": 100
765
+ },
766
+ "summarization": {
767
+ "billsum_document_filtered_to_6000_chars": {
768
+ "num_of_instances": 528,
769
+ "rouge1": 0.4363286082688575,
770
+ "rouge2": 0.22072608165229177,
771
+ "rougeL": 0.3067201488969288,
772
+ "score": 0.3067201488969288,
773
+ "score_name": "rougeL",
774
+ "rougeLsum": 0.37348424474826925,
775
+ "rouge1_ci_low": 0.42622540172251744,
776
+ "rouge1_ci_high": 0.445479768051287,
777
+ "rouge2_ci_low": 0.2133469390728694,
778
+ "rouge2_ci_high": 0.22852486335907615,
779
+ "rougeL_ci_low": 0.2990472341490495,
780
+ "rougeL_ci_high": 0.31469167779345575,
781
+ "score_ci_low": 0.2990472341490495,
782
+ "score_ci_high": 0.31469167779345575,
783
+ "rougeLsum_ci_low": 0.36439724635298765,
784
+ "rougeLsum_ci_high": 0.3821672398567385
785
+ },
786
+ "tldr_document_filtered_to_6000_chars": {
787
+ "num_of_instances": 1000,
788
+ "rouge1": 0.13281253260941045,
789
+ "rouge2": 0.020216836126571446,
790
+ "rougeL": 0.0953008337162679,
791
+ "score": 0.0953008337162679,
792
+ "score_name": "rougeL",
793
+ "rougeLsum": 0.10871871026938495,
794
+ "rouge1_ci_low": 0.1270654167753335,
795
+ "rouge1_ci_high": 0.13798234251944885,
796
+ "rouge2_ci_low": 0.01830191124480353,
797
+ "rouge2_ci_high": 0.022113898574922495,
798
+ "rougeL_ci_low": 0.09124510500356316,
799
+ "rougeL_ci_high": 0.098681444438937,
800
+ "score_ci_low": 0.09124510500356316,
801
+ "score_ci_high": 0.098681444438937,
802
+ "rougeLsum_ci_low": 0.10412203835097511,
803
+ "rougeLsum_ci_high": 0.11266943648159043
804
+ },
805
+ "score": 0.20101049130659834,
806
+ "score_name": "subsets_mean",
807
+ "num_of_instances": 1528
808
+ },
809
+ "translation": {
810
+ "mt_flores_101_ara_eng": {
811
+ "num_of_instances": 66,
812
+ "counts": [
813
+ 1297,
814
+ 853,
815
+ 602,
816
+ 432
817
+ ],
818
+ "totals": [
819
+ 1773,
820
+ 1707,
821
+ 1641,
822
+ 1575
823
+ ],
824
+ "precisions": [
825
+ 0.7315284827975184,
826
+ 0.4997070884592853,
827
+ 0.3668494820231566,
828
+ 0.27428571428571424
829
+ ],
830
+ "bp": 1.0,
831
+ "sys_len": 1773,
832
+ "ref_len": 1734,
833
+ "sacrebleu": 0.4379348051104114,
834
+ "score": 0.4379348051104114,
835
+ "score_name": "sacrebleu",
836
+ "score_ci_low": 0.3839572779473718,
837
+ "score_ci_high": 0.47542268740294724,
838
+ "sacrebleu_ci_low": 0.3839572779473718,
839
+ "sacrebleu_ci_high": 0.47542268740294724
840
+ },
841
+ "mt_flores_101_deu_eng": {
842
+ "num_of_instances": 66,
843
+ "counts": [
844
+ 1336,
845
+ 889,
846
+ 624,
847
+ 446
848
+ ],
849
+ "totals": [
850
+ 1793,
851
+ 1727,
852
+ 1661,
853
+ 1595
854
+ ],
855
+ "precisions": [
856
+ 0.7451199107640825,
857
+ 0.5147654892877823,
858
+ 0.37567730282962075,
859
+ 0.27962382445141065
860
+ ],
861
+ "bp": 1.0,
862
+ "sys_len": 1793,
863
+ "ref_len": 1734,
864
+ "sacrebleu": 0.4480290559468694,
865
+ "score": 0.4480290559468694,
866
+ "score_name": "sacrebleu",
867
+ "score_ci_low": 0.40758551741171895,
868
+ "score_ci_high": 0.49424014474329564,
869
+ "sacrebleu_ci_low": 0.40758551741171895,
870
+ "sacrebleu_ci_high": 0.49424014474329564
871
+ },
872
+ "mt_flores_101_eng_ara": {
873
+ "num_of_instances": 66,
874
+ "counts": [
875
+ 908,
876
+ 498,
877
+ 301,
878
+ 181
879
+ ],
880
+ "totals": [
881
+ 1600,
882
+ 1534,
883
+ 1468,
884
+ 1402
885
+ ],
886
+ "precisions": [
887
+ 0.5675,
888
+ 0.3246414602346806,
889
+ 0.20504087193460488,
890
+ 0.1291012838801712
891
+ ],
892
+ "bp": 1.0,
893
+ "sys_len": 1600,
894
+ "ref_len": 1589,
895
+ "sacrebleu": 0.26426230123240096,
896
+ "score": 0.26426230123240096,
897
+ "score_name": "sacrebleu",
898
+ "score_ci_low": 0.2206088677339497,
899
+ "score_ci_high": 0.3043911445742578,
900
+ "sacrebleu_ci_low": 0.2206088677339497,
901
+ "sacrebleu_ci_high": 0.3043911445742578
902
+ },
903
+ "mt_flores_101_eng_deu": {
904
+ "num_of_instances": 66,
905
+ "counts": [
906
+ 1252,
907
+ 762,
908
+ 513,
909
+ 371
910
+ ],
911
+ "totals": [
912
+ 1850,
913
+ 1784,
914
+ 1718,
915
+ 1652
916
+ ],
917
+ "precisions": [
918
+ 0.6767567567567567,
919
+ 0.42713004484304934,
920
+ 0.29860302677532014,
921
+ 0.2245762711864407
922
+ ],
923
+ "bp": 1.0,
924
+ "sys_len": 1850,
925
+ "ref_len": 1835,
926
+ "sacrebleu": 0.37313217401540816,
927
+ "score": 0.37313217401540816,
928
+ "score_name": "sacrebleu",
929
+ "score_ci_low": 0.3292059035458714,
930
+ "score_ci_high": 0.4247888842695035,
931
+ "sacrebleu_ci_low": 0.3292059035458714,
932
+ "sacrebleu_ci_high": 0.4247888842695035
933
+ },
934
+ "mt_flores_101_eng_fra": {
935
+ "num_of_instances": 66,
936
+ "counts": [
937
+ 1553,
938
+ 1160,
939
+ 918,
940
+ 734
941
+ ],
942
+ "totals": [
943
+ 2035,
944
+ 1969,
945
+ 1903,
946
+ 1837
947
+ ],
948
+ "precisions": [
949
+ 0.763144963144963,
950
+ 0.5891315388522093,
951
+ 0.4823962165002627,
952
+ 0.3995645073489385
953
+ ],
954
+ "bp": 0.9839145587719164,
955
+ "sys_len": 2035,
956
+ "ref_len": 2068,
957
+ "sacrebleu": 0.5338385284906572,
958
+ "score": 0.5338385284906572,
959
+ "score_name": "sacrebleu",
960
+ "score_ci_low": 0.4960508343377122,
961
+ "score_ci_high": 0.5778992367091751,
962
+ "sacrebleu_ci_low": 0.4960508343377122,
963
+ "sacrebleu_ci_high": 0.5778992367091751
964
+ },
965
+ "mt_flores_101_eng_kor": {
966
+ "num_of_instances": 66,
967
+ "counts": [
968
+ 1322,
969
+ 700,
970
+ 421,
971
+ 259
972
+ ],
973
+ "totals": [
974
+ 2388,
975
+ 2322,
976
+ 2256,
977
+ 2190
978
+ ],
979
+ "precisions": [
980
+ 0.5536013400335008,
981
+ 0.301464254952627,
982
+ 0.18661347517730498,
983
+ 0.1182648401826484
984
+ ],
985
+ "bp": 1.0,
986
+ "sys_len": 2388,
987
+ "ref_len": 2235,
988
+ "sacrebleu": 0.2463530263425343,
989
+ "score": 0.2463530263425343,
990
+ "score_name": "sacrebleu",
991
+ "score_ci_low": 0.21902218576525906,
992
+ "score_ci_high": 0.28268262467368716,
993
+ "sacrebleu_ci_low": 0.21902218576525906,
994
+ "sacrebleu_ci_high": 0.28268262467368716
995
+ },
996
+ "mt_flores_101_eng_por": {
997
+ "num_of_instances": 66,
998
+ "counts": [
999
+ 1463,
1000
+ 1077,
1001
+ 833,
1002
+ 645
1003
+ ],
1004
+ "totals": [
1005
+ 1890,
1006
+ 1824,
1007
+ 1758,
1008
+ 1692
1009
+ ],
1010
+ "precisions": [
1011
+ 0.774074074074074,
1012
+ 0.5904605263157895,
1013
+ 0.47383390216154725,
1014
+ 0.3812056737588652
1015
+ ],
1016
+ "bp": 0.9863375760488048,
1017
+ "sys_len": 1890,
1018
+ "ref_len": 1916,
1019
+ "sacrebleu": 0.5287072892340219,
1020
+ "score": 0.5287072892340219,
1021
+ "score_name": "sacrebleu",
1022
+ "score_ci_low": 0.47933696238952217,
1023
+ "score_ci_high": 0.5714963999565859,
1024
+ "sacrebleu_ci_low": 0.47933696238952217,
1025
+ "sacrebleu_ci_high": 0.5714963999565859
1026
+ },
1027
+ "mt_flores_101_eng_ron": {
1028
+ "num_of_instances": 66,
1029
+ "counts": [
1030
+ 1402,
1031
+ 981,
1032
+ 716,
1033
+ 523
1034
+ ],
1035
+ "totals": [
1036
+ 1969,
1037
+ 1903,
1038
+ 1837,
1039
+ 1771
1040
+ ],
1041
+ "precisions": [
1042
+ 0.71203656678517,
1043
+ 0.5155018392012611,
1044
+ 0.3897659227000544,
1045
+ 0.295313382269904
1046
+ ],
1047
+ "bp": 1.0,
1048
+ "sys_len": 1969,
1049
+ "ref_len": 1949,
1050
+ "sacrebleu": 0.4533721907576224,
1051
+ "score": 0.4533721907576224,
1052
+ "score_name": "sacrebleu",
1053
+ "score_ci_low": 0.41581094731566015,
1054
+ "score_ci_high": 0.498033544687375,
1055
+ "sacrebleu_ci_low": 0.41581094731566015,
1056
+ "sacrebleu_ci_high": 0.498033544687375
1057
+ },
1058
+ "mt_flores_101_eng_spa": {
1059
+ "num_of_instances": 66,
1060
+ "counts": [
1061
+ 1285,
1062
+ 735,
1063
+ 453,
1064
+ 283
1065
+ ],
1066
+ "totals": [
1067
+ 1994,
1068
+ 1928,
1069
+ 1862,
1070
+ 1796
1071
+ ],
1072
+ "precisions": [
1073
+ 0.6444332998996991,
1074
+ 0.38122406639004147,
1075
+ 0.24328678839957035,
1076
+ 0.15757238307349666
1077
+ ],
1078
+ "bp": 0.9491803375373334,
1079
+ "sys_len": 1994,
1080
+ "ref_len": 2098,
1081
+ "sacrebleu": 0.2956909013134059,
1082
+ "score": 0.2956909013134059,
1083
+ "score_name": "sacrebleu",
1084
+ "score_ci_low": 0.2691627285024834,
1085
+ "score_ci_high": 0.32834717984418194,
1086
+ "sacrebleu_ci_low": 0.2691627285024834,
1087
+ "sacrebleu_ci_high": 0.32834717984418194
1088
+ },
1089
+ "mt_flores_101_fra_eng": {
1090
+ "num_of_instances": 66,
1091
+ "counts": [
1092
+ 1366,
1093
+ 964,
1094
+ 696,
1095
+ 499
1096
+ ],
1097
+ "totals": [
1098
+ 1844,
1099
+ 1778,
1100
+ 1712,
1101
+ 1646
1102
+ ],
1103
+ "precisions": [
1104
+ 0.7407809110629067,
1105
+ 0.5421822272215973,
1106
+ 0.40654205607476634,
1107
+ 0.30315917375455653
1108
+ ],
1109
+ "bp": 1.0,
1110
+ "sys_len": 1844,
1111
+ "ref_len": 1734,
1112
+ "sacrebleu": 0.47168581235777585,
1113
+ "score": 0.47168581235777585,
1114
+ "score_name": "sacrebleu",
1115
+ "score_ci_low": 0.41832464742975967,
1116
+ "score_ci_high": 0.5113720236775223,
1117
+ "sacrebleu_ci_low": 0.41832464742975967,
1118
+ "sacrebleu_ci_high": 0.5113720236775223
1119
+ },
1120
+ "mt_flores_101_jpn_eng": {
1121
+ "num_of_instances": 66,
1122
+ "counts": [
1123
+ 1121,
1124
+ 627,
1125
+ 398,
1126
+ 260
1127
+ ],
1128
+ "totals": [
1129
+ 1831,
1130
+ 1765,
1131
+ 1699,
1132
+ 1633
1133
+ ],
1134
+ "precisions": [
1135
+ 0.6122337520480612,
1136
+ 0.35524079320113316,
1137
+ 0.23425544437904647,
1138
+ 0.15921616656460502
1139
+ ],
1140
+ "bp": 1.0,
1141
+ "sys_len": 1831,
1142
+ "ref_len": 1734,
1143
+ "sacrebleu": 0.30010915149877077,
1144
+ "score": 0.30010915149877077,
1145
+ "score_name": "sacrebleu",
1146
+ "score_ci_low": 0.25220636468972635,
1147
+ "score_ci_high": 0.34253906716484656,
1148
+ "sacrebleu_ci_low": 0.25220636468972635,
1149
+ "sacrebleu_ci_high": 0.34253906716484656
1150
+ },
1151
+ "mt_flores_101_kor_eng": {
1152
+ "num_of_instances": 66,
1153
+ "counts": [
1154
+ 1108,
1155
+ 607,
1156
+ 365,
1157
+ 233
1158
+ ],
1159
+ "totals": [
1160
+ 1751,
1161
+ 1685,
1162
+ 1619,
1163
+ 1553
1164
+ ],
1165
+ "precisions": [
1166
+ 0.6327812678469446,
1167
+ 0.3602373887240356,
1168
+ 0.22544780728844965,
1169
+ 0.150032195750161
1170
+ ],
1171
+ "bp": 1.0,
1172
+ "sys_len": 1751,
1173
+ "ref_len": 1734,
1174
+ "sacrebleu": 0.2963249503420515,
1175
+ "score": 0.2963249503420515,
1176
+ "score_name": "sacrebleu",
1177
+ "score_ci_low": 0.25949260863076823,
1178
+ "score_ci_high": 0.3478140976036152,
1179
+ "sacrebleu_ci_low": 0.25949260863076823,
1180
+ "sacrebleu_ci_high": 0.3478140976036152
1181
+ },
1182
+ "mt_flores_101_por_eng": {
1183
+ "num_of_instances": 66,
1184
+ "counts": [
1185
+ 1380,
1186
+ 1014,
1187
+ 776,
1188
+ 594
1189
+ ],
1190
+ "totals": [
1191
+ 1815,
1192
+ 1749,
1193
+ 1683,
1194
+ 1617
1195
+ ],
1196
+ "precisions": [
1197
+ 0.7603305785123967,
1198
+ 0.5797598627787307,
1199
+ 0.46108140225787286,
1200
+ 0.3673469387755102
1201
+ ],
1202
+ "bp": 1.0,
1203
+ "sys_len": 1815,
1204
+ "ref_len": 1734,
1205
+ "sacrebleu": 0.5227284747817351,
1206
+ "score": 0.5227284747817351,
1207
+ "score_name": "sacrebleu",
1208
+ "score_ci_low": 0.46746182790145246,
1209
+ "score_ci_high": 0.5686400938537622,
1210
+ "sacrebleu_ci_low": 0.46746182790145246,
1211
+ "sacrebleu_ci_high": 0.5686400938537622
1212
+ },
1213
+ "mt_flores_101_ron_eng": {
1214
+ "num_of_instances": 66,
1215
+ "counts": [
1216
+ 1375,
1217
+ 972,
1218
+ 711,
1219
+ 529
1220
+ ],
1221
+ "totals": [
1222
+ 1800,
1223
+ 1734,
1224
+ 1668,
1225
+ 1602
1226
+ ],
1227
+ "precisions": [
1228
+ 0.7638888888888888,
1229
+ 0.560553633217993,
1230
+ 0.4262589928057554,
1231
+ 0.33021223470661676
1232
+ ],
1233
+ "bp": 1.0,
1234
+ "sys_len": 1800,
1235
+ "ref_len": 1734,
1236
+ "sacrebleu": 0.49548272782749336,
1237
+ "score": 0.49548272782749336,
1238
+ "score_name": "sacrebleu",
1239
+ "score_ci_low": 0.45134581590558903,
1240
+ "score_ci_high": 0.5288737168538972,
1241
+ "sacrebleu_ci_low": 0.45134581590558903,
1242
+ "sacrebleu_ci_high": 0.5288737168538972
1243
+ },
1244
+ "mt_flores_101_spa_eng": {
1245
+ "num_of_instances": 66,
1246
+ "counts": [
1247
+ 1218,
1248
+ 705,
1249
+ 454,
1250
+ 300
1251
+ ],
1252
+ "totals": [
1253
+ 1885,
1254
+ 1819,
1255
+ 1753,
1256
+ 1687
1257
+ ],
1258
+ "precisions": [
1259
+ 0.6461538461538461,
1260
+ 0.38757559098405714,
1261
+ 0.2589845978322875,
1262
+ 0.17783046828689983
1263
+ ],
1264
+ "bp": 1.0,
1265
+ "sys_len": 1885,
1266
+ "ref_len": 1734,
1267
+ "sacrebleu": 0.32771258154747107,
1268
+ "score": 0.32771258154747107,
1269
+ "score_name": "sacrebleu",
1270
+ "score_ci_low": 0.3009156083594827,
1271
+ "score_ci_high": 0.3820347897282282,
1272
+ "sacrebleu_ci_low": 0.3009156083594827,
1273
+ "sacrebleu_ci_high": 0.3820347897282282
1274
+ },
1275
+ "score": 0.3996909313865753,
1276
+ "score_name": "subsets_mean",
1277
+ "num_of_instances": 990
1278
+ },
1279
+ "score": 0.5400687869936714,
1280
+ "score_name": "subsets_mean",
1281
+ "num_of_instances": 12472
1282
+ }
1283
+ }
results/bluebench/2025-06-21T11-34-24_evaluation_results.json ADDED
@@ -0,0 +1,1283 @@
1
+ {
2
+ "environment_info": {
3
+ "timestamp_utc": "2025-06-21T15:34:20.869271Z",
4
+ "command_line_invocation": [
5
+ "/dccstor/jbworks/miniforge3/envs/bb/bin/unitxt-evaluate",
6
+ "--tasks",
7
+ "benchmarks.bluebench",
8
+ "--model",
9
+ "cross_provider",
10
+ "--model_args",
11
+ "model_name=watsonx/mistralai/mistral-small-3-1-24b-instruct-2503,max_tokens=256",
12
+ "--output_path",
13
+ "./results/bluebench",
14
+ "--log_samples",
15
+ "--trust_remote_code",
16
+ "--batch_size",
17
+ "8",
18
+ "--verbosity",
19
+ "ERROR"
20
+ ],
21
+ "parsed_arguments": {
22
+ "tasks": [
23
+ "benchmarks.bluebench"
24
+ ],
25
+ "split": "test",
26
+ "num_fewshots": null,
27
+ "limit": null,
28
+ "batch_size": 8,
29
+ "model": "watsonx/mistralai/mistral-small-3-1-24b-instruct-2503",
30
+ "model_args": {
31
+ "max_tokens": 256
32
+ },
33
+ "gen_kwargs": null,
34
+ "chat_template_kwargs": null,
35
+ "output_path": "./results/bluebench",
36
+ "output_file_prefix": "evaluation_results",
37
+ "log_samples": true,
38
+ "verbosity": "ERROR",
39
+ "apply_chat_template": false,
40
+ "trust_remote_code": true,
41
+ "disable_hf_cache": false,
42
+ "cache_dir": null
43
+ },
44
+ "unitxt_version": "1.24.0",
45
+ "unitxt_commit_hash": "2bfd4494ec443ef86013e30d31f4860177124476",
46
+ "python_version": "3.10.18",
47
+ "system": "Linux",
48
+ "system_version": "#1 SMP PREEMPT_DYNAMIC Fri Aug 9 14:06:03 EDT 2024",
49
+ "installed_packages": {
50
+ "nvidia-cufile-cu12": "1.11.1.6",
51
+ "triton": "3.3.1",
52
+ "nltk": "3.9.1",
53
+ "anyio": "4.9.0",
54
+ "absl-py": "2.3.0",
55
+ "tiktoken": "0.9.0",
56
+ "charset-normalizer": "3.4.2",
57
+ "nvidia-cuda-runtime-cu12": "12.6.77",
58
+ "sympy": "1.14.0",
59
+ "mecab-ko": "1.0.1",
60
+ "litellm": "1.72.6.post1",
61
+ "httpcore": "1.0.9",
62
+ "Jinja2": "3.1.6",
63
+ "jsonschema-specifications": "2025.4.1",
64
+ "pydantic_core": "2.33.2",
65
+ "nvidia-cusparse-cu12": "12.5.4.2",
66
+ "yarl": "1.20.1",
67
+ "openai": "1.88.0",
68
+ "portalocker": "3.2.0",
69
+ "pandas": "2.3.0",
70
+ "multiprocess": "0.70.16",
71
+ "jsonschema": "4.24.0",
72
+ "unitxt": "1.24.0",
73
+ "nvidia-nvjitlink-cu12": "12.6.85",
74
+ "nvidia-cublas-cu12": "12.6.4.1",
75
+ "pydantic": "2.11.7",
76
+ "async-timeout": "5.0.1",
77
+ "annotated-types": "0.7.0",
78
+ "rouge_score": "0.1.2",
79
+ "contourpy": "1.3.2",
80
+ "aiosignal": "1.3.2",
81
+ "nvidia-cuda-cupti-cu12": "12.6.80",
82
+ "pillow": "11.2.1",
83
+ "six": "1.17.0",
84
+ "diskcache": "5.6.3",
85
+ "tqdm": "4.67.1",
86
+ "pyarrow": "20.0.0",
87
+ "h11": "0.16.0",
88
+ "zipp": "3.19.2",
89
+ "tzdata": "2025.2",
90
+ "bert-score": "0.3.13",
91
+ "setuptools": "80.9.0",
92
+ "referencing": "0.36.2",
93
+ "sacrebleu": "2.5.1",
94
+ "filelock": "3.18.0",
95
+ "urllib3": "2.5.0",
96
+ "scipy": "1.15.3",
97
+ "nvidia-nccl-cu12": "2.26.2",
98
+ "kiwisolver": "1.4.8",
99
+ "networkx": "3.4.2",
100
+ "typing-inspection": "0.4.1",
101
+ "lxml": "5.4.0",
102
+ "sniffio": "1.3.1",
103
+ "scikit-learn": "1.7.0",
104
+ "nvidia-curand-cu12": "10.3.7.77",
105
+ "pip": "25.1.1",
106
+ "fonttools": "4.58.4",
107
+ "transformers": "4.52.4",
108
+ "datasets": "3.6.0",
109
+ "nvidia-cusolver-cu12": "11.7.1.2",
110
+ "cycler": "0.12.1",
111
+ "evaluate": "0.4.3",
112
+ "distro": "1.9.0",
113
+ "idna": "3.10",
114
+ "MarkupSafe": "3.0.2",
115
+ "frozenlist": "1.7.0",
116
+ "pyparsing": "3.2.3",
117
+ "jiter": "0.10.0",
118
+ "importlib_metadata": "8.0.0",
119
+ "packaging": "24.2",
120
+ "psutil": "7.0.0",
121
+ "mecab-ko-dic": "1.0.0",
122
+ "joblib": "1.5.1",
123
+ "fsspec": "2025.3.0",
124
+ "dill": "0.3.8",
125
+ "tokenizers": "0.21.1",
126
+ "wheel": "0.45.1",
127
+ "nvidia-nvtx-cu12": "12.6.77",
128
+ "nvidia-cusparselt-cu12": "0.6.3",
129
+ "hf-xet": "1.1.4",
130
+ "propcache": "0.3.2",
131
+ "numpy": "2.2.6",
132
+ "mpmath": "1.3.0",
133
+ "multidict": "6.5.0",
134
+ "conllu": "6.0.0",
135
+ "safetensors": "0.5.3",
136
+ "requests": "2.32.4",
137
+ "regex": "2024.11.6",
138
+ "aiohttp": "3.12.13",
139
+ "tabulate": "0.9.0",
140
+ "certifi": "2025.6.15",
141
+ "accelerate": "1.8.0",
142
+ "nvidia-cufft-cu12": "11.3.0.4",
143
+ "nvidia-cuda-nvrtc-cu12": "12.6.77",
144
+ "click": "8.2.1",
145
+ "typing_extensions": "4.12.2",
146
+ "attrs": "25.3.0",
147
+ "exceptiongroup": "1.3.0",
148
+ "tenacity": "9.1.2",
149
+ "pytz": "2025.2",
150
+ "aiohappyeyeballs": "2.6.1",
151
+ "python-dateutil": "2.9.0.post0",
152
+ "torch": "2.7.1",
153
+ "python-dotenv": "1.1.0",
154
+ "httpx": "0.28.1",
155
+ "matplotlib": "3.10.3",
156
+ "xxhash": "3.5.0",
157
+ "PyYAML": "6.0.2",
158
+ "huggingface-hub": "0.33.0",
159
+ "colorama": "0.4.6",
160
+ "rpds-py": "0.25.1",
161
+ "threadpoolctl": "3.6.0",
162
+ "nvidia-cudnn-cu12": "9.5.1.17",
163
+ "jaraco.collections": "5.1.0",
164
+ "tomli": "2.0.1",
165
+ "backports.tarfile": "1.2.0",
166
+ "jaraco.context": "5.3.0",
167
+ "typeguard": "4.3.0",
168
+ "autocommand": "2.2.2",
169
+ "jaraco.text": "3.12.1",
170
+ "more-itertools": "10.3.0",
171
+ "platformdirs": "4.2.2",
172
+ "inflect": "7.3.1",
173
+ "jaraco.functools": "4.0.1"
174
+ }
175
+ },
176
+ "results": {
177
+ "bias": {
178
+ "safety_bbq_age": {
179
+ "accuracy": 0.6555555555555556,
180
+ "accuracy_ci_low": 0.5444444444444444,
181
+ "accuracy_ci_high": 0.7333333333333333,
182
+ "score_name": "accuracy",
183
+ "score": 0.6555555555555556,
184
+ "score_ci_high": 0.7333333333333333,
185
+ "score_ci_low": 0.5444444444444444,
186
+ "num_of_instances": 90
187
+ },
188
+ "safety_bbq_disability_status": {
189
+ "accuracy": 0.7333333333333333,
190
+ "accuracy_ci_low": 0.6333333333333333,
191
+ "accuracy_ci_high": 0.8222222222222222,
192
+ "score_name": "accuracy",
193
+ "score": 0.7333333333333333,
194
+ "score_ci_high": 0.8222222222222222,
195
+ "score_ci_low": 0.6333333333333333,
196
+ "num_of_instances": 90
197
+ },
198
+ "safety_bbq_gender_identity": {
199
+ "accuracy": 0.8666666666666667,
200
+ "accuracy_ci_low": 0.7666666666666667,
201
+ "accuracy_ci_high": 0.9222222222222223,
202
+ "score_name": "accuracy",
203
+ "score": 0.8666666666666667,
204
+ "score_ci_high": 0.9222222222222223,
205
+ "score_ci_low": 0.7666666666666667,
206
+ "num_of_instances": 90
207
+ },
208
+ "safety_bbq_nationality": {
209
+ "accuracy": 0.7777777777777778,
210
+ "accuracy_ci_low": 0.6777777777777778,
211
+ "accuracy_ci_high": 0.8555555555555555,
212
+ "score_name": "accuracy",
213
+ "score": 0.7777777777777778,
214
+ "score_ci_high": 0.8555555555555555,
215
+ "score_ci_low": 0.6777777777777778,
216
+ "num_of_instances": 90
217
+ },
218
+ "safety_bbq_physical_appearance": {
219
+ "accuracy": 0.8666666666666667,
220
+ "accuracy_ci_low": 0.7777777777777778,
221
+ "accuracy_ci_high": 0.9222222222222223,
222
+ "score_name": "accuracy",
223
+ "score": 0.8666666666666667,
224
+ "score_ci_high": 0.9222222222222223,
225
+ "score_ci_low": 0.7777777777777778,
226
+ "num_of_instances": 90
227
+ },
228
+ "safety_bbq_race_ethnicity": {
229
+ "accuracy": 0.9111111111111111,
230
+ "accuracy_ci_low": 0.8444444444444444,
231
+ "accuracy_ci_high": 0.9555555555555556,
232
+ "score_name": "accuracy",
233
+ "score": 0.9111111111111111,
234
+ "score_ci_high": 0.9555555555555556,
235
+ "score_ci_low": 0.8444444444444444,
236
+ "num_of_instances": 90
237
+ },
238
+ "safety_bbq_race_x_gender": {
239
+ "accuracy": 0.7666666666666667,
240
+ "accuracy_ci_low": 0.6666666666666666,
241
+ "accuracy_ci_high": 0.8444444444444444,
242
+ "score_name": "accuracy",
243
+ "score": 0.7666666666666667,
244
+ "score_ci_high": 0.8444444444444444,
245
+ "score_ci_low": 0.6666666666666666,
246
+ "num_of_instances": 90
247
+ },
248
+ "safety_bbq_race_x_ses": {
249
+ "accuracy": 0.8888888888888888,
250
+ "accuracy_ci_low": 0.8222222222222222,
251
+ "accuracy_ci_high": 0.9444444444444444,
252
+ "score_name": "accuracy",
253
+ "score": 0.8888888888888888,
254
+ "score_ci_high": 0.9444444444444444,
255
+ "score_ci_low": 0.8222222222222222,
256
+ "num_of_instances": 90
257
+ },
258
+ "safety_bbq_religion": {
259
+ "accuracy": 0.7333333333333333,
260
+ "accuracy_ci_low": 0.6333333333333333,
261
+ "accuracy_ci_high": 0.8111111111111111,
262
+ "score_name": "accuracy",
263
+ "score": 0.7333333333333333,
264
+ "score_ci_high": 0.8111111111111111,
265
+ "score_ci_low": 0.6333333333333333,
266
+ "num_of_instances": 90
267
+ },
268
+ "safety_bbq_ses": {
269
+ "accuracy": 0.8888888888888888,
270
+ "accuracy_ci_low": 0.8041323028207193,
271
+ "accuracy_ci_high": 0.9444444444444444,
272
+ "score_name": "accuracy",
273
+ "score": 0.8888888888888888,
274
+ "score_ci_high": 0.9444444444444444,
275
+ "score_ci_low": 0.8041323028207193,
276
+ "num_of_instances": 90
277
+ },
278
+ "safety_bbq_sexual_orientation": {
279
+ "accuracy": 0.7777777777777778,
280
+ "accuracy_ci_low": 0.6888888888888889,
281
+ "accuracy_ci_high": 0.8555555555555555,
282
+ "score_name": "accuracy",
283
+ "score": 0.7777777777777778,
284
+ "score_ci_high": 0.8555555555555555,
285
+ "score_ci_low": 0.6888888888888889,
286
+ "num_of_instances": 90
287
+ },
288
+ "score": 0.806060606060606,
289
+ "score_name": "subsets_mean",
290
+ "num_of_instances": 990
291
+ },
292
+ "chatbot_abilities": {
293
+ "arena_hard_generation_english_gpt_4_0314_reference": {
294
+ "num_of_instances": 500,
295
+ "llama_3_70b_instruct_template_arena_hard": 0.5,
296
+ "score": 0.5,
297
+ "score_name": "llama_3_70b_instruct_template_arena_hard"
298
+ },
299
+ "score": 0.5,
300
+ "score_name": "subsets_mean",
301
+ "num_of_instances": 500
302
+ },
303
+ "entity_extraction": {
304
+ "universal_ner_en_ewt": {
305
+ "num_of_instances": 1000,
306
+ "f1_Person": 0.3625730994152047,
307
+ "f1_Organization": 0.2893081761006289,
308
+ "f1_Location": 0.33070866141732286,
309
+ "f1_macro": 0.32752997897771885,
310
+ "recall_macro": 0.2851171336228018,
311
+ "precision_macro": 0.39184928961146603,
312
+ "in_classes_support": 0.433184855233853,
313
+ "f1_micro": 0.21082220660576245,
314
+ "recall_micro": 0.2857142857142857,
315
+ "precision_micro": 0.16703786191536749,
316
+ "score": 0.21082220660576245,
317
+ "score_name": "f1_micro",
318
+ "score_ci_low": 0.18138158087338846,
319
+ "score_ci_high": 0.2505537736745896,
320
+ "f1_micro_ci_low": 0.18138158087338846,
321
+ "f1_micro_ci_high": 0.2505537736745896
322
+ },
323
+ "score": 0.21082220660576245,
324
+ "score_name": "subsets_mean",
325
+ "num_of_instances": 1000
326
+ },
327
+ "knowledge": {
328
+ "mmlu_pro_biology": {
329
+ "accuracy": 0.676056338028169,
330
+ "accuracy_ci_low": 0.5492957746478874,
331
+ "accuracy_ci_high": 0.7746478873239436,
332
+ "score_name": "accuracy",
333
+ "score": 0.676056338028169,
334
+ "score_ci_high": 0.7746478873239436,
335
+ "score_ci_low": 0.5492957746478874,
336
+ "num_of_instances": 71
337
+ },
338
+ "mmlu_pro_business": {
339
+ "accuracy": 0.22535211267605634,
340
+ "accuracy_ci_low": 0.14084507042253522,
341
+ "accuracy_ci_high": 0.3380281690140845,
342
+ "score_name": "accuracy",
343
+ "score": 0.22535211267605634,
344
+ "score_ci_high": 0.3380281690140845,
345
+ "score_ci_low": 0.14084507042253522,
346
+ "num_of_instances": 71
347
+ },
348
+ "mmlu_pro_chemistry": {
349
+ "accuracy": 0.23943661971830985,
350
+ "accuracy_ci_low": 0.14084507042253522,
351
+ "accuracy_ci_high": 0.352112676056338,
352
+ "score_name": "accuracy",
353
+ "score": 0.23943661971830985,
354
+ "score_ci_high": 0.352112676056338,
355
+ "score_ci_low": 0.14084507042253522,
356
+ "num_of_instances": 71
357
+ },
358
+ "mmlu_pro_computer_science": {
359
+ "accuracy": 0.5352112676056338,
360
+ "accuracy_ci_low": 0.4225352112676056,
361
+ "accuracy_ci_high": 0.647887323943662,
362
+ "score_name": "accuracy",
363
+ "score": 0.5352112676056338,
364
+ "score_ci_high": 0.647887323943662,
365
+ "score_ci_low": 0.4225352112676056,
366
+ "num_of_instances": 71
367
+ },
368
+ "mmlu_pro_economics": {
369
+ "accuracy": 0.676056338028169,
370
+ "accuracy_ci_low": 0.5492957746478874,
371
+ "accuracy_ci_high": 0.7746478873239436,
372
+ "score_name": "accuracy",
373
+ "score": 0.676056338028169,
374
+ "score_ci_high": 0.7746478873239436,
375
+ "score_ci_low": 0.5492957746478874,
376
+ "num_of_instances": 71
377
+ },
378
+ "mmlu_pro_engineering": {
379
+ "accuracy": 0.2112676056338028,
380
+ "accuracy_ci_low": 0.1267605633802817,
381
+ "accuracy_ci_high": 0.30985915492957744,
382
+ "score_name": "accuracy",
383
+ "score": 0.2112676056338028,
384
+ "score_ci_high": 0.30985915492957744,
385
+ "score_ci_low": 0.1267605633802817,
386
+ "num_of_instances": 71
387
+ },
388
+ "mmlu_pro_health": {
389
+ "accuracy": 0.5070422535211268,
390
+ "accuracy_ci_low": 0.39436619718309857,
391
+ "accuracy_ci_high": 0.6197183098591549,
392
+ "score_name": "accuracy",
393
+ "score": 0.5070422535211268,
394
+ "score_ci_high": 0.6197183098591549,
395
+ "score_ci_low": 0.39436619718309857,
396
+ "num_of_instances": 71
397
+ },
398
+ "mmlu_pro_history": {
399
+ "accuracy": 0.6197183098591549,
400
+ "accuracy_ci_low": 0.49295774647887325,
401
+ "accuracy_ci_high": 0.7323943661971831,
402
+ "score_name": "accuracy",
403
+ "score": 0.6197183098591549,
404
+ "score_ci_high": 0.7323943661971831,
405
+ "score_ci_low": 0.49295774647887325,
406
+ "num_of_instances": 71
407
+ },
408
+ "mmlu_pro_law": {
409
+ "accuracy": 0.43661971830985913,
410
+ "accuracy_ci_low": 0.323943661971831,
411
+ "accuracy_ci_high": 0.5492957746478874,
412
+ "score_name": "accuracy",
413
+ "score": 0.43661971830985913,
414
+ "score_ci_high": 0.5492957746478874,
415
+ "score_ci_low": 0.323943661971831,
416
+ "num_of_instances": 71
417
+ },
418
+ "mmlu_pro_math": {
419
+ "accuracy": 0.352112676056338,
420
+ "accuracy_ci_low": 0.23943661971830985,
421
+ "accuracy_ci_high": 0.4647887323943662,
422
+ "score_name": "accuracy",
423
+ "score": 0.352112676056338,
424
+ "score_ci_high": 0.4647887323943662,
425
+ "score_ci_low": 0.23943661971830985,
426
+ "num_of_instances": 71
427
+ },
428
+ "mmlu_pro_other": {
429
+ "accuracy": 0.5070422535211268,
430
+ "accuracy_ci_low": 0.39436619718309857,
431
+ "accuracy_ci_high": 0.6056338028169014,
432
+ "score_name": "accuracy",
433
+ "score": 0.5070422535211268,
434
+ "score_ci_high": 0.6056338028169014,
435
+ "score_ci_low": 0.39436619718309857,
436
+ "num_of_instances": 71
437
+ },
438
+ "mmlu_pro_philosophy": {
439
+ "accuracy": 0.49295774647887325,
440
+ "accuracy_ci_low": 0.38028169014084506,
441
+ "accuracy_ci_high": 0.6197183098591549,
442
+ "score_name": "accuracy",
443
+ "score": 0.49295774647887325,
444
+ "score_ci_high": 0.6197183098591549,
445
+ "score_ci_low": 0.38028169014084506,
446
+ "num_of_instances": 71
447
+ },
448
+ "mmlu_pro_physics": {
449
+ "accuracy": 0.323943661971831,
450
+ "accuracy_ci_low": 0.22535211267605634,
451
+ "accuracy_ci_high": 0.4225352112676056,
452
+ "score_name": "accuracy",
453
+ "score": 0.323943661971831,
454
+ "score_ci_high": 0.4225352112676056,
455
+ "score_ci_low": 0.22535211267605634,
456
+ "num_of_instances": 71
457
+ },
458
+ "mmlu_pro_psychology": {
459
+ "accuracy": 0.6197183098591549,
460
+ "accuracy_ci_low": 0.49295774647887325,
461
+ "accuracy_ci_high": 0.7183098591549296,
462
+ "score_name": "accuracy",
463
+ "score": 0.6197183098591549,
464
+ "score_ci_high": 0.7183098591549296,
465
+ "score_ci_low": 0.49295774647887325,
466
+ "num_of_instances": 71
467
+ },
468
+ "score": 0.45875251509054327,
469
+ "score_name": "subsets_mean",
470
+ "num_of_instances": 994
471
+ },
472
+ "legal": {
473
+ "legalbench_abercrombie": {
474
+ "f1_macro": 0.3580404378230465,
475
+ "f1_suggestive": 0.375,
476
+ "f1_generic": 0.4,
477
+ "f1_arbitrary": 0.3076923076923077,
478
+ "f1_fanciful": 0.43478260869565216,
479
+ "f1_descriptive": 0.2727272727272727,
480
+ "f1_macro_ci_low": 0.24962639354166832,
481
+ "f1_macro_ci_high": 0.48946907875285006,
482
+ "score_name": "f1_micro",
483
+ "score": 0.35772357723577236,
484
+ "score_ci_high": 0.47244094488188976,
485
+ "score_ci_low": 0.25002795229813274,
486
+ "num_of_instances": 85,
487
+ "accuracy": 0.25882352941176473,
488
+ "accuracy_ci_low": 0.1716911469146947,
489
+ "accuracy_ci_high": 0.35294117647058826,
490
+ "f1_micro": 0.35772357723577236,
491
+ "f1_micro_ci_low": 0.25002795229813274,
492
+ "f1_micro_ci_high": 0.47244094488188976
493
+ },
494
+ "legalbench_corporate_lobbying": {
495
+ "f1_macro": 0.28985125707516124,
496
+ "f1_no": 0.5233644859813084,
497
+ "f1_yes": 0.056338028169014086,
498
+ "f1_macro_ci_low": 0.23940411227724898,
499
+ "f1_macro_ci_high": 0.3484173679194581,
500
+ "score_name": "f1_micro",
501
+ "score": 0.4070175438596491,
502
+ "score_ci_high": 0.4836620089714397,
503
+ "score_ci_low": 0.32857142857142857,
504
+ "num_of_instances": 200,
505
+ "accuracy": 0.29,
506
+ "accuracy_ci_low": 0.225,
507
+ "accuracy_ci_high": 0.355,
508
+ "f1_micro": 0.4070175438596491,
509
+ "f1_micro_ci_low": 0.32857142857142857,
510
+ "f1_micro_ci_high": 0.4836620089714397
511
+ },
512
+ "legalbench_function_of_decision_section": {
513
+ "f1_macro": 0.2830193138674671,
514
+ "f1_conclusion": 0.2222222222222222,
515
+ "f1_decree": 0.25,
516
+ "f1_issue": 0.18604651162790697,
517
+ "f1_analysis": 0.32558139534883723,
518
+ "f1_facts": 0.26666666666666666,
519
+ "f1_procedural history": 0.3384615384615385,
520
+ "f1_rule": 0.39215686274509803,
521
+ "f1_macro_ci_low": 0.22077760516552375,
522
+ "f1_macro_ci_high": 0.3674671451575924,
523
+ "score_name": "f1_micro",
524
+ "score": 0.2920634920634921,
525
+ "score_ci_high": 0.37441470472532123,
526
+ "score_ci_low": 0.2264866423312643,
527
+ "num_of_instances": 200,
528
+ "accuracy": 0.23,
529
+ "accuracy_ci_low": 0.175,
530
+ "accuracy_ci_high": 0.3,
531
+ "f1_micro": 0.2920634920634921,
532
+ "f1_micro_ci_low": 0.2264866423312643,
533
+ "f1_micro_ci_high": 0.37441470472532123
534
+ },
535
+ "legalbench_international_citizenship_questions": {
536
+ "f1_macro": 0.20266040688575898,
537
+ "f1_yes": 0.2222222222222222,
538
+ "f1_no": 0.18309859154929578,
539
+ "f1_macro_ci_low": 0.13797716111770406,
540
+ "f1_macro_ci_high": 0.27925168740954265,
541
+ "score_name": "f1_micro",
542
+ "score": 0.1991701244813278,
543
+ "score_ci_high": 0.27571125309001354,
544
+ "score_ci_low": 0.13617021276595745,
545
+ "num_of_instances": 200,
546
+ "accuracy": 0.12,
547
+ "accuracy_ci_low": 0.08,
548
+ "accuracy_ci_high": 0.175,
549
+ "f1_micro": 0.1991701244813278,
550
+ "f1_micro_ci_low": 0.13617021276595745,
551
+ "f1_micro_ci_high": 0.27571125309001354
552
+ },
553
+ "legalbench_proa": {
554
+ "f1_macro": 0.8320072332730561,
555
+ "f1_yes": 0.8285714285714286,
556
+ "f1_no": 0.8354430379746836,
557
+ "f1_macro_ci_low": 0.749025367448282,
558
+ "f1_macro_ci_high": 0.890690718451174,
559
+ "score_name": "f1_micro",
560
+ "score": 0.8322147651006712,
561
+ "score_ci_high": 0.8903225806451613,
562
+ "score_ci_low": 0.7482933733536249,
563
+ "num_of_instances": 85,
564
+ "accuracy": 0.7294117647058823,
565
+ "accuracy_ci_low": 0.6235294117647059,
566
+ "accuracy_ci_high": 0.8117647058823529,
567
+ "f1_micro": 0.8322147651006712,
568
+ "f1_micro_ci_low": 0.7482933733536249,
569
+ "f1_micro_ci_high": 0.8903225806451613
570
+ },
571
+ "score": 0.4176379005481825,
572
+ "score_name": "subsets_mean",
573
+ "num_of_instances": 770
574
+ },
575
+ "news_classification": {
576
+ "20_newsgroups_short": {
577
+ "f1_macro": 0.5251289790109961,
578
+ "f1_cars": 0.735632183908046,
579
+ "f1_windows x": 0.11594202898550725,
580
+ "f1_atheism": 0.18181818181818182,
581
+ "f1_religion": 0.14285714285714285,
582
+ "f1_medicine": 0.825,
583
+ "f1_christianity": 0.43243243243243246,
584
+ "f1_for sale": 0.7142857142857143,
585
+ "f1_computer graphics": 0.5,
586
+ "f1_microsoft windows": 0.4507042253521127,
587
+ "f1_middle east": 0.5641025641025641,
588
+ "f1_motorcycles": 0.66,
589
+ "f1_pc hardware": 0.4528301886792453,
590
+ "f1_mac hardware": 0.5494505494505495,
591
+ "f1_electronics": 0.574468085106383,
592
+ "f1_guns": 0.2647058823529412,
593
+ "f1_space": 0.7333333333333333,
594
+ "f1_cryptography": 0.5897435897435898,
595
+ "f1_baseball": 0.8380952380952381,
596
+ "f1_hockey": 0.8403361344537815,
597
+ "f1_politics": 0.3368421052631579,
598
+ "f1_macro_ci_low": 0.4982372111894923,
599
+ "f1_macro_ci_high": 0.5536034958738426,
600
+ "score_name": "f1_micro",
601
+ "score": 0.5513196480938416,
602
+ "score_ci_high": 0.580897965540632,
603
+ "score_ci_low": 0.519239825515335,
604
+ "num_of_instances": 1000,
605
+ "accuracy": 0.47,
606
+ "accuracy_ci_low": 0.439,
607
+ "accuracy_ci_high": 0.5,
608
+ "f1_micro": 0.5513196480938416,
609
+ "f1_micro_ci_low": 0.519239825515335,
610
+ "f1_micro_ci_high": 0.580897965540632
611
+ },
612
+ "score": 0.5513196480938416,
613
+ "score_name": "subsets_mean",
614
+ "num_of_instances": 1000
615
+ },
616
+ "product_help": {
617
+ "cfpb_product_2023": {
618
+ "f1_macro": 0.598937790062507,
619
+ "f1_credit reporting or credit repair services or other personal consumer reports": 0.800990916597853,
620
+ "f1_money transfer or virtual currency or money service": 0.6530612244897959,
621
+ "f1_student loan": 0.7741935483870968,
622
+ "f1_credit card or prepaid card": 0.5154639175257731,
623
+ "f1_debt collection": 0.5394736842105263,
624
+ "f1_payday loan or title loan or personal loan": 0.0,
625
+ "f1_vehicle loan or lease": 0.6206896551724138,
626
+ "f1_mortgage": 0.8,
627
+ "f1_checking or savings account": 0.6865671641791045,
628
+ "f1_macro_ci_low": 0.5590827549101115,
629
+ "f1_macro_ci_high": 0.6436069882866612,
630
+ "score_name": "f1_micro",
631
+ "score": 0.7434435575826682,
632
+ "score_ci_high": 0.771404250423866,
633
+ "score_ci_low": 0.719193914944442,
634
+ "num_of_instances": 1000,
635
+ "accuracy": 0.652,
636
+ "accuracy_ci_low": 0.623,
637
+ "accuracy_ci_high": 0.684,
638
+ "f1_micro": 0.7434435575826682,
639
+ "f1_micro_ci_low": 0.719193914944442,
640
+ "f1_micro_ci_high": 0.771404250423866
641
+ },
642
+ "cfpb_product_watsonx": {
643
+ "f1_macro": 0.645506649922517,
644
+ "f1_mortgages and loans": 0.7393939393939394,
645
+ "f1_credit card": 0.7088607594936709,
646
+ "f1_debt collection": 0.6268656716417911,
647
+ "f1_retail banking": 0.4752475247524752,
648
+ "f1_credit reporting": 0.6771653543307087,
649
+ "f1_macro_ci_low": 0.6022236233880557,
650
+ "f1_macro_ci_high": 0.6875999406252342,
651
+ "score_name": "f1_micro",
652
+ "score": 0.6598407281001137,
653
+ "score_ci_high": 0.6994441194697146,
654
+ "score_ci_low": 0.6181710190063682,
655
+ "num_of_instances": 500,
656
+ "accuracy": 0.58,
657
+ "accuracy_ci_low": 0.5369658568845046,
658
+ "accuracy_ci_high": 0.624,
659
+ "f1_micro": 0.6598407281001137,
660
+ "f1_micro_ci_low": 0.6181710190063682,
661
+ "f1_micro_ci_high": 0.6994441194697146
662
+ },
663
+ "score": 0.701642142841391,
664
+ "score_name": "subsets_mean",
665
+ "num_of_instances": 1500
666
+ },
667
+ "qa_finance": {
668
+ "fin_qa": {
669
+ "num_of_instances": 1000,
670
+ "execution_accuracy": 0.111,
671
+ "program_accuracy": 0.124,
672
+ "score": 0.124,
673
+ "score_name": "program_accuracy",
674
+ "execution_accuracy_ci_low": 0.092,
675
+ "execution_accuracy_ci_high": 0.132,
676
+ "program_accuracy_ci_low": 0.104,
677
+ "program_accuracy_ci_high": 0.145,
678
+ "score_ci_low": 0.104,
679
+ "score_ci_high": 0.145
680
+ },
681
+ "score": 0.124,
682
+ "score_name": "subsets_mean",
683
+ "num_of_instances": 1000
684
+ },
685
+ "rag_general": {
686
+ "rag_response_generation_clapnq": {
687
+ "precision": 0.25881567594573107,
688
+ "recall": 0.634151229634738,
689
+ "f1": 0.31118943159428636,
690
+ "precision_ci_low": 0.2427503435734933,
691
+ "precision_ci_high": 0.2766316249638964,
692
+ "recall_ci_low": 0.6185798223025679,
693
+ "recall_ci_high": 0.6499187341273587,
694
+ "f1_ci_low": 0.2955441533424316,
695
+ "f1_ci_high": 0.32732829081701914,
696
+ "score_name": "f1",
697
+ "score": 0.31118943159428636,
698
+ "score_ci_high": 0.32732829081701914,
699
+ "score_ci_low": 0.2955441533424316,
700
+ "num_of_instances": 600,
701
+ "correctness_f1_bert_score.deberta_large_mnli": 0.5709441083172957,
702
+ "correctness_recall_bert_score.deberta_large_mnli": 0.6812374199926853,
703
+ "correctness_precision_bert_score.deberta_large_mnli": 0.5089003006368875,
704
+ "faithfullness_f1_token_overlap": 0.31344518499715596,
705
+ "faithfullness_recall_token_overlap": 0.2668468802457335,
706
+ "faithfullness_precision_token_overlap": 0.47679019585643934,
707
+ "correctness_f1_token_overlap": 0.31118943159428636,
708
+ "correctness_recall_token_overlap": 0.634151229634738,
709
+ "correctness_precision_token_overlap": 0.25881567594573107
710
+ },
711
+ "score": 0.31118943159428636,
712
+ "score_name": "subsets_mean",
713
+ "num_of_instances": 600
714
+ },
715
+ "reasoning": {
716
+ "hellaswag": {
717
+ "accuracy": 0.538,
718
+ "accuracy_ci_low": 0.505,
719
+ "accuracy_ci_high": 0.57,
720
+ "score_name": "accuracy",
721
+ "score": 0.538,
722
+ "score_ci_high": 0.57,
723
+ "score_ci_low": 0.505,
724
+ "num_of_instances": 1000
725
+ },
726
+ "openbook_qa": {
727
+ "accuracy": 0.87,
728
+ "accuracy_ci_low": 0.84,
729
+ "accuracy_ci_high": 0.898,
730
+ "score_name": "accuracy",
731
+ "score": 0.87,
732
+ "score_ci_high": 0.898,
733
+ "score_ci_low": 0.84,
734
+ "num_of_instances": 500
735
+ },
736
+ "score": 0.704,
737
+ "score_name": "subsets_mean",
738
+ "num_of_instances": 1500
739
+ },
740
+ "safety": {
741
+ "attaq_500": {
742
+ "severity_critical": 3.0,
743
+ "severity_high": 0.0,
744
+ "severity_medium": 2.0,
745
+ "severity_low": 95.0,
746
+ "category_harmful_info": 0.8016373084946758,
747
+ "category_deception": 0.8822153681757474,
748
+ "category_explicit_content": 0.832710394654611,
749
+ "category_violence": 0.7825738151986448,
750
+ "category_discrimination": 0.8220807708918102,
751
+ "category_substance_abuse": 0.826748459537824,
752
+ "category_pii": 0.7723372398565213,
753
+ "safety": 0.809054309744388,
754
+ "safety_ci_low": 0.7772938002229317,
755
+ "safety_ci_high": 0.8361201240729302,
756
+ "score_name": "safety",
757
+ "score": 0.809054309744388,
758
+ "score_ci_high": 0.8361201240729302,
759
+ "score_ci_low": 0.7772938002229317,
760
+ "num_of_instances": 100
761
+ },
762
+ "score": 0.809054309744388,
763
+ "score_name": "subsets_mean",
764
+ "num_of_instances": 100
765
+ },
766
+ "summarization": {
767
+ "billsum_document_filtered_to_6000_chars": {
768
+ "num_of_instances": 528,
769
+ "rouge1": 0.389351160643424,
770
+ "rougeL": 0.26506148266380164,
771
+ "score": 0.26506148266380164,
772
+ "score_name": "rougeL",
773
+ "rougeLsum": 0.3329771315874926,
774
+ "rouge2": 0.18480811116641105,
775
+ "rouge1_ci_low": 0.3793704942651626,
776
+ "rouge1_ci_high": 0.3991145148139802,
777
+ "rougeL_ci_low": 0.2574910713981229,
778
+ "rougeL_ci_high": 0.2723394364118134,
779
+ "score_ci_low": 0.2574910713981229,
780
+ "score_ci_high": 0.2723394364118134,
781
+ "rougeLsum_ci_low": 0.32368402136971663,
782
+ "rougeLsum_ci_high": 0.3419744690065713,
783
+ "rouge2_ci_low": 0.17799828534417894,
784
+ "rouge2_ci_high": 0.19189115183254815
785
+ },
786
+ "tldr_document_filtered_to_6000_chars": {
787
+ "num_of_instances": 1000,
788
+ "rouge1": 0.11298250338252126,
789
+ "rougeL": 0.08177611720945774,
790
+ "score": 0.08177611720945774,
791
+ "score_name": "rougeL",
792
+ "rougeLsum": 0.0950863205219803,
793
+ "rouge2": 0.015717793961971215,
794
+ "rouge1_ci_low": 0.10712881957329778,
795
+ "rouge1_ci_high": 0.11773128033523193,
796
+ "rougeL_ci_low": 0.07786560263494875,
797
+ "rougeL_ci_high": 0.08512540575510288,
798
+ "score_ci_low": 0.07786560263494875,
799
+ "score_ci_high": 0.08512540575510288,
800
+ "rougeLsum_ci_low": 0.09078789141905777,
801
+ "rougeLsum_ci_high": 0.09900306122637068,
802
+ "rouge2_ci_low": 0.013982816599457718,
803
+ "rouge2_ci_high": 0.0176417953678172
804
+ },
805
+ "score": 0.17341879993662968,
806
+ "score_name": "subsets_mean",
807
+ "num_of_instances": 1528
808
+ },
809
+ "translation": {
810
+ "mt_flores_101_ara_eng": {
811
+ "num_of_instances": 66,
812
+ "counts": [
813
+ 1335,
814
+ 792,
815
+ 517,
816
+ 343
817
+ ],
818
+ "totals": [
819
+ 5784,
820
+ 5718,
821
+ 5652,
822
+ 5586
823
+ ],
824
+ "precisions": [
825
+ 0.2308091286307054,
826
+ 0.1385099685204617,
827
+ 0.09147204529370134,
828
+ 0.06140350877192983
829
+ ],
830
+ "bp": 1.0,
831
+ "sys_len": 5784,
832
+ "ref_len": 1734,
833
+ "sacrebleu": 0.11575876843383869,
834
+ "score": 0.11575876843383869,
835
+ "score_name": "sacrebleu",
836
+ "score_ci_low": 0.09677258086487982,
837
+ "score_ci_high": 0.14198802557654883,
838
+ "sacrebleu_ci_low": 0.09677258086487982,
839
+ "sacrebleu_ci_high": 0.14198802557654883
840
+ },
841
+ "mt_flores_101_deu_eng": {
842
+ "num_of_instances": 66,
843
+ "counts": [
844
+ 1356,
845
+ 783,
846
+ 496,
847
+ 322
848
+ ],
849
+ "totals": [
850
+ 6913,
851
+ 6847,
852
+ 6781,
853
+ 6715
854
+ ],
855
+ "precisions": [
856
+ 0.19615217705771734,
857
+ 0.11435665254856142,
858
+ 0.07314555375313375,
859
+ 0.047952345495160094
860
+ ],
861
+ "bp": 1.0,
862
+ "sys_len": 6913,
863
+ "ref_len": 1734,
864
+ "sacrebleu": 0.09418095467735833,
865
+ "score": 0.09418095467735833,
866
+ "score_name": "sacrebleu",
867
+ "score_ci_low": 0.07855318506180965,
868
+ "score_ci_high": 0.11463100599546205,
869
+ "sacrebleu_ci_low": 0.07855318506180965,
870
+ "sacrebleu_ci_high": 0.11463100599546205
871
+ },
872
+ "mt_flores_101_eng_ara": {
873
+ "num_of_instances": 66,
874
+ "counts": [
875
+ 767,
876
+ 320,
877
+ 153,
878
+ 74
879
+ ],
880
+ "totals": [
881
+ 8433,
882
+ 8367,
883
+ 8301,
884
+ 8235
885
+ ],
886
+ "precisions": [
887
+ 0.09095221154986363,
888
+ 0.03824548822756066,
889
+ 0.018431514275388507,
890
+ 0.008986035215543413
891
+ ],
892
+ "bp": 1.0,
893
+ "sys_len": 8433,
894
+ "ref_len": 1589,
895
+ "sacrebleu": 0.027550573998889032,
896
+ "score": 0.027550573998889032,
897
+ "score_name": "sacrebleu",
898
+ "score_ci_low": 0.021208555129186346,
899
+ "score_ci_high": 0.03586774827340232,
900
+ "sacrebleu_ci_low": 0.021208555129186346,
901
+ "sacrebleu_ci_high": 0.03586774827340232
902
+ },
903
+ "mt_flores_101_eng_deu": {
904
+ "num_of_instances": 66,
905
+ "counts": [
906
+ 1257,
907
+ 721,
908
+ 459,
909
+ 305
910
+ ],
911
+ "totals": [
912
+ 5818,
913
+ 5752,
914
+ 5686,
915
+ 5620
916
+ ],
917
+ "precisions": [
918
+ 0.2160536266758336,
919
+ 0.12534770514603616,
920
+ 0.08072458670418571,
921
+ 0.05427046263345196
922
+ ],
923
+ "bp": 1.0,
924
+ "sys_len": 5818,
925
+ "ref_len": 1835,
926
+ "sacrebleu": 0.10436666970008357,
927
+ "score": 0.10436666970008357,
928
+ "score_name": "sacrebleu",
929
+ "score_ci_low": 0.08345328311475483,
930
+ "score_ci_high": 0.13560613265358804,
931
+ "sacrebleu_ci_low": 0.08345328311475483,
932
+ "sacrebleu_ci_high": 0.13560613265358804
933
+ },
934
+ "mt_flores_101_eng_fra": {
935
+ "num_of_instances": 66,
936
+ "counts": [
937
+ 1584,
938
+ 1138,
939
+ 882,
940
+ 702
941
+ ],
942
+ "totals": [
943
+ 5212,
944
+ 5146,
945
+ 5080,
946
+ 5014
947
+ ],
948
+ "precisions": [
949
+ 0.3039140445126631,
950
+ 0.22114263505635445,
951
+ 0.17362204724409447,
952
+ 0.14000797766254489
953
+ ],
954
+ "bp": 1.0,
955
+ "sys_len": 5212,
956
+ "ref_len": 2068,
957
+ "sacrebleu": 0.20104590260613545,
958
+ "score": 0.20104590260613545,
959
+ "score_name": "sacrebleu",
960
+ "score_ci_low": 0.15816587969091414,
961
+ "score_ci_high": 0.23752616478719799,
962
+ "sacrebleu_ci_low": 0.15816587969091414,
963
+ "sacrebleu_ci_high": 0.23752616478719799
964
+ },
965
+ "mt_flores_101_eng_kor": {
966
+ "num_of_instances": 66,
967
+ "counts": [
968
+ 1408,
969
+ 697,
970
+ 390,
971
+ 222
972
+ ],
973
+ "totals": [
974
+ 7618,
975
+ 7552,
976
+ 7486,
977
+ 7420
978
+ ],
979
+ "precisions": [
980
+ 0.18482541349435547,
981
+ 0.09229343220338983,
982
+ 0.052097248196633715,
983
+ 0.029919137466307276
984
+ ],
985
+ "bp": 1.0,
986
+ "sys_len": 7618,
987
+ "ref_len": 2235,
988
+ "sacrebleu": 0.071808207221539,
989
+ "score": 0.071808207221539,
990
+ "score_name": "sacrebleu",
991
+ "score_ci_low": 0.06067958452990026,
992
+ "score_ci_high": 0.08476974354932754,
993
+ "sacrebleu_ci_low": 0.06067958452990026,
994
+ "sacrebleu_ci_high": 0.08476974354932754
995
+ },
996
+ "mt_flores_101_eng_por": {
997
+ "num_of_instances": 66,
998
+ "counts": [
999
+ 1475,
1000
+ 1028,
1001
+ 769,
1002
+ 585
1003
+ ],
1004
+ "totals": [
1005
+ 5487,
1006
+ 5421,
1007
+ 5355,
1008
+ 5289
1009
+ ],
1010
+ "precisions": [
1011
+ 0.26881720430107525,
1012
+ 0.1896329090573695,
1013
+ 0.14360410830999068,
1014
+ 0.1106069200226886
1015
+ ],
1016
+ "bp": 1.0,
1017
+ "sys_len": 5487,
1018
+ "ref_len": 1916,
1019
+ "sacrebleu": 0.16868636617394397,
1020
+ "score": 0.16868636617394397,
1021
+ "score_name": "sacrebleu",
1022
+ "score_ci_low": 0.1449996381584022,
1023
+ "score_ci_high": 0.19615430198730424,
1024
+ "sacrebleu_ci_low": 0.1449996381584022,
1025
+ "sacrebleu_ci_high": 0.19615430198730424
1026
+ },
1027
+ "mt_flores_101_eng_ron": {
1028
+ "num_of_instances": 66,
1029
+ "counts": [
1030
+ 1326,
1031
+ 841,
1032
+ 567,
1033
+ 389
1034
+ ],
1035
+ "totals": [
1036
+ 4837,
1037
+ 4771,
1038
+ 4705,
1039
+ 4639
1040
+ ],
1041
+ "precisions": [
1042
+ 0.2741368616911309,
1043
+ 0.17627331796269124,
1044
+ 0.12051009564293304,
1045
+ 0.0838542789394266
1046
+ ],
1047
+ "bp": 1.0,
1048
+ "sys_len": 4837,
1049
+ "ref_len": 1949,
1050
+ "sacrebleu": 0.14865368140396532,
1051
+ "score": 0.14865368140396532,
1052
+ "score_name": "sacrebleu",
1053
+ "score_ci_low": 0.12285617051153769,
1054
+ "score_ci_high": 0.20722793764019112,
1055
+ "sacrebleu_ci_low": 0.12285617051153769,
1056
+ "sacrebleu_ci_high": 0.20722793764019112
1057
+ },
1058
+ "mt_flores_101_eng_spa": {
1059
+ "num_of_instances": 66,
1060
+ "counts": [
1061
+ 1381,
1062
+ 759,
1063
+ 459,
1064
+ 281
1065
+ ],
1066
+ "totals": [
1067
+ 6405,
1068
+ 6339,
1069
+ 6273,
1070
+ 6207
1071
+ ],
1072
+ "precisions": [
1073
+ 0.21561280249804843,
1074
+ 0.11973497397065783,
1075
+ 0.07317073170731708,
1076
+ 0.04527146769776059
1077
+ ],
1078
+ "bp": 1.0,
1079
+ "sys_len": 6405,
1080
+ "ref_len": 2098,
1081
+ "sacrebleu": 0.09616441307249027,
1082
+ "score": 0.09616441307249027,
1083
+ "score_name": "sacrebleu",
1084
+ "score_ci_low": 0.07930542472032699,
1085
+ "score_ci_high": 0.11825351259228321,
1086
+ "sacrebleu_ci_low": 0.07930542472032699,
1087
+ "sacrebleu_ci_high": 0.11825351259228321
1088
+ },
1089
+ "mt_flores_101_fra_eng": {
1090
+ "num_of_instances": 66,
1091
+ "counts": [
1092
+ 1391,
1093
+ 930,
1094
+ 644,
1095
+ 448
1096
+ ],
1097
+ "totals": [
1098
+ 5836,
1099
+ 5770,
1100
+ 5704,
1101
+ 5638
1102
+ ],
1103
+ "precisions": [
1104
+ 0.23834818368745714,
1105
+ 0.1611785095320624,
1106
+ 0.11290322580645162,
1107
+ 0.07946080170273147
1108
+ ],
1109
+ "bp": 1.0,
1110
+ "sys_len": 5836,
1111
+ "ref_len": 1734,
1112
+ "sacrebleu": 0.13625252798661136,
1113
+ "score": 0.13625252798661136,
1114
+ "score_name": "sacrebleu",
1115
+ "score_ci_low": 0.11732338312550004,
1116
+ "score_ci_high": 0.15953611321490402,
1117
+ "sacrebleu_ci_low": 0.11732338312550004,
1118
+ "sacrebleu_ci_high": 0.15953611321490402
1119
+ },
1120
+ "mt_flores_101_jpn_eng": {
1121
+ "num_of_instances": 66,
1122
+ "counts": [
1123
+ 1250,
1124
+ 617,
1125
+ 343,
1126
+ 187
1127
+ ],
1128
+ "totals": [
1129
+ 6443,
1130
+ 6377,
1131
+ 6311,
1132
+ 6245
1133
+ ],
1134
+ "precisions": [
1135
+ 0.1940090020176936,
1136
+ 0.09675395954210445,
1137
+ 0.054349548407542386,
1138
+ 0.029943955164131304
1139
+ ],
1140
+ "bp": 1.0,
1141
+ "sys_len": 6443,
1142
+ "ref_len": 1734,
1143
+ "sacrebleu": 0.07434451508550546,
1144
+ "score": 0.07434451508550546,
1145
+ "score_name": "sacrebleu",
1146
+ "score_ci_low": 0.0644058696100964,
1147
+ "score_ci_high": 0.08685819903559298,
1148
+ "sacrebleu_ci_low": 0.0644058696100964,
1149
+ "sacrebleu_ci_high": 0.08685819903559298
1150
+ },
1151
+ "mt_flores_101_kor_eng": {
1152
+ "num_of_instances": 66,
1153
+ "counts": [
1154
+ 1162,
1155
+ 546,
1156
+ 295,
1157
+ 168
1158
+ ],
1159
+ "totals": [
1160
+ 5914,
1161
+ 5848,
1162
+ 5782,
1163
+ 5716
1164
+ ],
1165
+ "precisions": [
1166
+ 0.19648292188028407,
1167
+ 0.09336525307797537,
1168
+ 0.05102040816326531,
1169
+ 0.02939118264520644
1170
+ ],
1171
+ "bp": 1.0,
1172
+ "sys_len": 5914,
1173
+ "ref_len": 1734,
1174
+ "sacrebleu": 0.07242154227292893,
1175
+ "score": 0.07242154227292893,
1176
+ "score_name": "sacrebleu",
1177
+ "score_ci_low": 0.06291349695976677,
1178
+ "score_ci_high": 0.08531939881489306,
1179
+ "sacrebleu_ci_low": 0.06291349695976677,
1180
+ "sacrebleu_ci_high": 0.08531939881489306
1181
+ },
1182
+ "mt_flores_101_por_eng": {
1183
+ "num_of_instances": 66,
1184
+ "counts": [
1185
+ 1417,
1186
+ 974,
1187
+ 723,
1188
+ 547
1189
+ ],
1190
+ "totals": [
1191
+ 6392,
1192
+ 6326,
1193
+ 6260,
1194
+ 6194
1195
+ ],
1196
+ "precisions": [
1197
+ 0.22168335419274093,
1198
+ 0.15396775213404995,
1199
+ 0.11549520766773164,
1200
+ 0.08831126896997095
1201
+ ],
1202
+ "bp": 1.0,
1203
+ "sys_len": 6392,
1204
+ "ref_len": 1734,
1205
+ "sacrebleu": 0.13659529350611513,
1206
+ "score": 0.13659529350611513,
1207
+ "score_name": "sacrebleu",
1208
+ "score_ci_low": 0.11528113587288888,
1209
+ "score_ci_high": 0.15836557569065857,
1210
+ "sacrebleu_ci_low": 0.11528113587288888,
1211
+ "sacrebleu_ci_high": 0.15836557569065857
1212
+ },
1213
+ "mt_flores_101_ron_eng": {
1214
+ "num_of_instances": 66,
1215
+ "counts": [
1216
+ 1395,
1217
+ 948,
1218
+ 686,
1219
+ 506
1220
+ ],
1221
+ "totals": [
1222
+ 5680,
1223
+ 5614,
1224
+ 5548,
1225
+ 5482
1226
+ ],
1227
+ "precisions": [
1228
+ 0.24559859154929575,
1229
+ 0.16886355539722125,
1230
+ 0.12364816149963952,
1231
+ 0.09230207953301715
1232
+ ],
1233
+ "bp": 1.0,
1234
+ "sys_len": 5680,
1235
+ "ref_len": 1734,
1236
+ "sacrebleu": 0.14749939449155924,
1237
+ "score": 0.14749939449155924,
1238
+ "score_name": "sacrebleu",
1239
+ "score_ci_low": 0.1292139766827155,
1240
+ "score_ci_high": 0.1819243976706418,
1241
+ "sacrebleu_ci_low": 0.1292139766827155,
1242
+ "sacrebleu_ci_high": 0.1819243976706418
1243
+ },
1244
+ "mt_flores_101_spa_eng": {
1245
+ "num_of_instances": 66,
1246
+ "counts": [
1247
+ 1300,
1248
+ 712,
1249
+ 450,
1250
+ 290
1251
+ ],
1252
+ "totals": [
1253
+ 6166,
1254
+ 6100,
1255
+ 6034,
1256
+ 5968
1257
+ ],
1258
+ "precisions": [
1259
+ 0.21083360363282516,
1260
+ 0.11672131147540984,
1261
+ 0.07457739476300962,
1262
+ 0.04859249329758713
1263
+ ],
1264
+ "bp": 1.0,
1265
+ "sys_len": 6166,
1266
+ "ref_len": 1734,
1267
+ "sacrebleu": 0.09717769541368253,
1268
+ "score": 0.09717769541368253,
1269
+ "score_name": "sacrebleu",
1270
+ "score_ci_low": 0.08585452687395417,
1271
+ "score_ci_high": 0.11046953561336194,
1272
+ "sacrebleu_ci_low": 0.08585452687395417,
1273
+ "sacrebleu_ci_high": 0.11046953561336194
1274
+ },
1275
+ "score": 0.11283376706964308,
1276
+ "score_name": "subsets_mean",
1277
+ "num_of_instances": 990
1278
+ },
1279
+ "score": 0.4523639482757903,
1280
+ "score_name": "subsets_mean",
1281
+ "num_of_instances": 12472
1282
+ }
1283
+ }
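
For readers parsing these result files: each "subsets_mean" entry above appears to be the unweighted average of its child subset scores (e.g. the summarization group score 0.17341879993662968 is the plain mean of the billsum and tldr rougeL values, not a mean weighted by num_of_instances), and each translation "sacrebleu" value is consistent with the standard BLEU aggregation over the stored n-gram statistics: brevity penalty times the geometric mean of the four precisions. A minimal sketch in Python; the helper names are ours, and only the field values come from the JSON above:

    import math

    def bleu_from_stats(precisions, bp):
        # Standard BLEU aggregation: brevity penalty times the geometric
        # mean of the (here four) n-gram precisions.
        return bp * math.exp(sum(math.log(p) for p in precisions) / len(precisions))

    def subsets_mean(scores):
        # Unweighted mean over subset scores, matching the "subsets_mean"
        # entries in this file.
        return sum(scores) / len(scores)

    # Spot-check against mt_flores_101_ara_eng (reported 0.11575876843383869):
    print(bleu_from_stats(
        [0.2308091286307054, 0.1385099685204617,
         0.09147204529370134, 0.06140350877192983],
        bp=1.0))

    # Spot-check the summarization group: matches the reported
    # 0.17341879993662968 up to floating-point rounding.
    print(subsets_mean([0.26506148266380164, 0.08177611720945774]))

The same averaging presumably yields the benchmark-level score 0.4523639482757903 from the per-group scores, though only three of the groups are visible in this excerpt.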