Datasets:
Deepchung
committed on
Commit
·
e8c4357
1
Parent(s):
5ff1484
update to 128k
Browse files
M4LE.py
DELETED
@@ -1,149 +0,0 @@
|
|
1 |
-
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
|
2 |
-
#
|
3 |
-
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
-
# you may not use this file except in compliance with the License.
|
5 |
-
# You may obtain a copy of the License at
|
6 |
-
#
|
7 |
-
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
-
#
|
9 |
-
# Unless required by applicable law or agreed to in writing, software
|
10 |
-
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
-
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
-
# See the License for the specific language governing permissions and
|
13 |
-
# limitations under the License.
|
14 |
-
import os
|
15 |
-
|
16 |
-
import datasets
|
17 |
-
import json
|
18 |
-
|
19 |
-
|
20 |
-
_DESCRIPTION = (
|
21 |
-
"M4LE is a systematic and comprehensive long-context benchmark. It aims to"
|
22 |
-
" evaluate LM performances in five long-context understanding abilities,"
|
23 |
-
" across multiple domains, languages and task types."
|
24 |
-
)
|
25 |
-
_HOMEPAGE = "https://github.com/KwanWaiChung/M4LE"
|
26 |
-
_LICENSE = """MIT License
|
27 |
-
Copyright (c) 2023 Wai-Chung Kwan
|
28 |
-
|
29 |
-
Permission is hereby granted, free of charge, to any person obtaining a copy
|
30 |
-
of this software and associated documentation files (the "Software"), to deal
|
31 |
-
in the Software without restriction, including without limitation the rights
|
32 |
-
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
33 |
-
copies of the Software, and to permit persons to whom the Software is
|
34 |
-
furnished to do so, subject to the following conditions:
|
35 |
-
|
36 |
-
The above copyright notice and this permission notice shall be included in all
|
37 |
-
copies or substantial portions of the Software.
|
38 |
-
|
39 |
-
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
40 |
-
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
41 |
-
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
42 |
-
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
43 |
-
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
44 |
-
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
45 |
-
SOFTWARE."""
|
46 |
-
URL = r"https://huggingface.co/datasets/wckwan/M4LE/resolve/main/data.zip"
|
47 |
-
|
48 |
-
|
49 |
-
tasks = [
|
50 |
-
"arxiv",
|
51 |
-
"bigpatent_global_cls",
|
52 |
-
"bigpatent_global_sum",
|
53 |
-
"booksum",
|
54 |
-
"c3",
|
55 |
-
"cepsum",
|
56 |
-
"clts+",
|
57 |
-
"cnewsum",
|
58 |
-
"cnnnews",
|
59 |
-
"drcd_explicit-single",
|
60 |
-
"drcd_semantic-single",
|
61 |
-
"duorc",
|
62 |
-
"dureader",
|
63 |
-
"hotpotqa",
|
64 |
-
"lcsts",
|
65 |
-
"marc",
|
66 |
-
"mnds-news_explicit-single",
|
67 |
-
"mnds-news_explicit-multiple",
|
68 |
-
"mnds-news_semantic-multiple",
|
69 |
-
"ncls",
|
70 |
-
"news-commentary-en2zh",
|
71 |
-
"news-commentary-zh2en",
|
72 |
-
"news2016",
|
73 |
-
"newsqa",
|
74 |
-
"nq-open",
|
75 |
-
"online-shopping",
|
76 |
-
"open-subtitles-en2zh",
|
77 |
-
"open-subtitles-zh2en",
|
78 |
-
"pubmed",
|
79 |
-
"tedtalks-en2zh",
|
80 |
-
"tedtalks-zh2en",
|
81 |
-
"thucnews_explicit-single",
|
82 |
-
"thucnews_explicit-multiple",
|
83 |
-
"thucnews_semantic-multiple",
|
84 |
-
"triviaqa",
|
85 |
-
"wiki2019zh",
|
86 |
-
"wikihow",
|
87 |
-
"wikitext-103",
|
88 |
-
"wow",
|
89 |
-
]
|
90 |
-
|
91 |
-
|
92 |
-
class M4LEConfig(datasets.BuilderConfig):
    """BuilderConfig for one M4LE task, with the version pinned to 1.0.0."""

    def __init__(self, **kwargs):
        # All task configs share the same dataset version; only the
        # config name (set by the caller via kwargs) distinguishes them.
        super().__init__(version=datasets.Version("1.0.0"), **kwargs)
95 |
-
|
96 |
-
|
97 |
-
class LongBench(datasets.GeneratorBasedBuilder):
    """Builder for the M4LE long-context benchmark.

    Exposes one config per entry in ``tasks``; each config yields a single
    TEST split read from ``data/<task>.jsonl`` inside the downloaded archive.

    NOTE(review): the class is named ``LongBench`` even though this is the
    M4LE script — presumably copied from a LongBench loader. The ``datasets``
    library locates the builder via its base class rather than its name, so
    the identifier is left unchanged here to avoid an interface change.
    """

    # One config per benchmark task, named after the task.
    BUILDER_CONFIGS = [M4LEConfig(name=task) for task in tasks]

    def _info(self):
        """Return the DatasetInfo with the schema shared by all task files."""
        schema = datasets.Features(
            {
                "instruction": datasets.Value("string"),
                "input": datasets.Value("string"),
                # A list of acceptable answer strings per example.
                "answers": [datasets.Value("string")],
                "input_length": datasets.Value("int32"),
                "total_length": datasets.Value("int32"),
                "length_bucket": datasets.Value("int32"),
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=schema,
            homepage=_HOMEPAGE,
            license=_LICENSE,
        )

    def _split_generators(self, dl_manager):
        """Download/extract the shared archive and locate this task's jsonl."""
        extracted_dir = dl_manager.download_and_extract(URL)
        jsonl_path = os.path.join(
            extracted_dir, "data", f"{self.config.name}.jsonl"
        )
        # The benchmark ships evaluation data only, hence a single TEST split.
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={"filepath": jsonl_path},
            )
        ]

    def _generate_examples(self, filepath):
        """Yield ``(key, example)`` pairs, one per JSON line of *filepath*."""
        with open(filepath, encoding="utf-8") as f:
            for idx, raw_line in enumerate(f):
                record = json.loads(raw_line)
                # Keys are made unique across configs by prefixing the task name.
                yield f"{self.config.name}-{idx}", {
                    "instruction": record["instruction"],
                    "input": record["input"],
                    "answers": record["answers"],
                    "input_length": record["input_length"],
                    "total_length": record["total_length"],
                    "length_bucket": record["length_bucket"],
                }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
README.md
CHANGED
@@ -13,6 +13,163 @@ tags:
|
|
13 |
- Long Context
|
14 |
size_categories:
|
15 |
- 1K<n<10K
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
16 |
---
|
17 |
|
18 |
## Introduction
|
|
|
13 |
- Long Context
|
14 |
size_categories:
|
15 |
- 1K<n<10K
|
16 |
+
configs:
|
17 |
+
- config_name: mnds-news_semantic-multiple
|
18 |
+
data_files:
|
19 |
+
- split: test
|
20 |
+
path: classification/mnds-news_semantic-multiple.jsonl
|
21 |
+
- config_name: thucnews_explicit-single
|
22 |
+
data_files:
|
23 |
+
- split: test
|
24 |
+
path: classification/thucnews_explicit-single.jsonl
|
25 |
+
- config_name: mnds-news_explicit-multiple
|
26 |
+
data_files:
|
27 |
+
- split: test
|
28 |
+
path: classification/mnds-news_explicit-multiple.jsonl
|
29 |
+
- config_name: thucnews_explicit-multiple
|
30 |
+
data_files:
|
31 |
+
- split: test
|
32 |
+
path: classification/thucnews_explicit-multiple.jsonl
|
33 |
+
- config_name: mnds-news_explicit-single
|
34 |
+
data_files:
|
35 |
+
- split: test
|
36 |
+
path: classification/mnds-news_explicit-single.jsonl
|
37 |
+
- config_name: bigpatent_global_cls
|
38 |
+
data_files:
|
39 |
+
- split: test
|
40 |
+
path: classification/bigpatent_global_cls.jsonl
|
41 |
+
- config_name: marc
|
42 |
+
data_files:
|
43 |
+
- split: test
|
44 |
+
path: classification/marc.jsonl
|
45 |
+
- config_name: thucnews_semantic-multiple
|
46 |
+
data_files:
|
47 |
+
- split: test
|
48 |
+
path: classification/thucnews_semantic-multiple.jsonl
|
49 |
+
- config_name: online-shopping
|
50 |
+
data_files:
|
51 |
+
- split: test
|
52 |
+
path: classification/online-shopping.jsonl
|
53 |
+
- config_name: wikitext-103
|
54 |
+
data_files:
|
55 |
+
- split: test
|
56 |
+
path: nli/wikitext-103.jsonl
|
57 |
+
- config_name: wiki2019zh
|
58 |
+
data_files:
|
59 |
+
- split: test
|
60 |
+
path: nli/wiki2019zh.jsonl
|
61 |
+
- config_name: tedtalks-zh2en
|
62 |
+
data_files:
|
63 |
+
- split: test
|
64 |
+
path: translation/tedtalks-zh2en.jsonl
|
65 |
+
- config_name: news-commentary-zh2en
|
66 |
+
data_files:
|
67 |
+
- split: test
|
68 |
+
path: translation/news-commentary-zh2en.jsonl
|
69 |
+
- config_name: open-subtitles-zh2en
|
70 |
+
data_files:
|
71 |
+
- split: test
|
72 |
+
path: translation/open-subtitles-zh2en.jsonl
|
73 |
+
- config_name: open-subtitles-en2zh
|
74 |
+
data_files:
|
75 |
+
- split: test
|
76 |
+
path: translation/open-subtitles-en2zh.jsonl
|
77 |
+
- config_name: news-commentary-en2zh
|
78 |
+
data_files:
|
79 |
+
- split: test
|
80 |
+
path: translation/news-commentary-en2zh.jsonl
|
81 |
+
- config_name: tedtalks-en2zh
|
82 |
+
data_files:
|
83 |
+
- split: test
|
84 |
+
path: translation/tedtalks-en2zh.jsonl
|
85 |
+
- config_name: cnnnews
|
86 |
+
data_files:
|
87 |
+
- split: test
|
88 |
+
path: summarization/cnnnews.jsonl
|
89 |
+
- config_name: clts
|
90 |
+
data_files:
|
91 |
+
- split: test
|
92 |
+
path: summarization/clts.jsonl
|
93 |
+
- config_name: cnewsum
|
94 |
+
data_files:
|
95 |
+
- split: test
|
96 |
+
path: summarization/cnewsum.jsonl
|
97 |
+
- config_name: booksum
|
98 |
+
data_files:
|
99 |
+
- split: test
|
100 |
+
path: summarization/booksum.jsonl
|
101 |
+
- config_name: cepsum
|
102 |
+
data_files:
|
103 |
+
- split: test
|
104 |
+
path: summarization/cepsum.jsonl
|
105 |
+
- config_name: pubmed
|
106 |
+
data_files:
|
107 |
+
- split: test
|
108 |
+
path: summarization/pubmed.jsonl
|
109 |
+
- config_name: lcsts
|
110 |
+
data_files:
|
111 |
+
- split: test
|
112 |
+
path: summarization/lcsts.jsonl
|
113 |
+
- config_name: news2016
|
114 |
+
data_files:
|
115 |
+
- split: test
|
116 |
+
path: summarization/news2016.jsonl
|
117 |
+
- config_name: arxiv
|
118 |
+
data_files:
|
119 |
+
- split: test
|
120 |
+
path: summarization/arxiv.jsonl
|
121 |
+
- config_name: wikihow
|
122 |
+
data_files:
|
123 |
+
- split: test
|
124 |
+
path: summarization/wikihow.jsonl
|
125 |
+
- config_name: bigpatent_global_sum
|
126 |
+
data_files:
|
127 |
+
- split: test
|
128 |
+
path: summarization/bigpatent_global_sum.jsonl
|
129 |
+
- config_name: ncls
|
130 |
+
data_files:
|
131 |
+
- split: test
|
132 |
+
path: summarization/ncls.jsonl
|
133 |
+
- config_name: drcd_semantic-single
|
134 |
+
data_files:
|
135 |
+
- split: test
|
136 |
+
path: qa/drcd_semantic-single.jsonl
|
137 |
+
- config_name: duorc
|
138 |
+
data_files:
|
139 |
+
- split: test
|
140 |
+
path: qa/duorc.jsonl
|
141 |
+
- config_name: nq-open
|
142 |
+
data_files:
|
143 |
+
- split: test
|
144 |
+
path: qa/nq-open.jsonl
|
145 |
+
- config_name: newsqa
|
146 |
+
data_files:
|
147 |
+
- split: test
|
148 |
+
path: qa/newsqa.jsonl
|
149 |
+
- config_name: triviaqa
|
150 |
+
data_files:
|
151 |
+
- split: test
|
152 |
+
path: qa/triviaqa.jsonl
|
153 |
+
- config_name: c3
|
154 |
+
data_files:
|
155 |
+
- split: test
|
156 |
+
path: qa/c3.jsonl
|
157 |
+
- config_name: dureader
|
158 |
+
data_files:
|
159 |
+
- split: test
|
160 |
+
path: qa/dureader.jsonl
|
161 |
+
- config_name: hotpotqa
|
162 |
+
data_files:
|
163 |
+
- split: test
|
164 |
+
path: qa/hotpotqa.jsonl
|
165 |
+
- config_name: wow
|
166 |
+
data_files:
|
167 |
+
- split: test
|
168 |
+
path: topic_retrieval/wow.jsonl
|
169 |
+
- config_name: drcd_explicit-single
|
170 |
+
data_files:
|
171 |
+
- split: test
|
172 |
+
path: topic_retrieval/drcd_explicit-single.jsonl
|
173 |
---
|
174 |
|
175 |
## Introduction
|
data.zip
DELETED
@@ -1,3 +0,0 @@
|
|
1 |
-
version https://git-lfs.github.com/spec/v1
|
2 |
-
oid sha256:fb158289e55e749619bfa24d4ca3e5ce64283aa6c7e98d51f31cb1fb5bd531b2
|
3 |
-
size 1090016266
|
|
|
|
|
|
|
|