Delete Matrix.py
Matrix.py
DELETED
@@ -1,121 +0,0 @@
# -*- coding: utf-8 -*-

import glob
import orjson
import os

import datasets
from itertools import islice

_DESCRIPTION = """
An open-source pretraining dataset containing 4690 billion tokens.
This bilingual dataset, with both English and Chinese text, is used for training the Neo models.
"""

_CITATION = """
@article{zhang2024mapneo,
    title = {MAP-Neo: Highly Capable and Transparent Bilingual Large Language Model Series},
    author = {
        Ge Zhang and
        Scott Qu and
        Jiaheng Liu and
        Chenchen Zhang and
        Chenghua Lin and
        Chou Leuang Yu and
        Danny Pan and
        Esther Cheng and
        Jie Liu and
        Qunshu Lin and
        Raven Yuan and
        Tuney Zheng and
        Wei Pang and
        Xinrun Du and
        Yiming Liang and
        Yinghao Ma and
        Yizhi Li and
        Ziyang Ma and
        Bill Lin and
        Emmanouil Benetos and
        Huan Yang and
        Junting Zhou and
        Kaijing Ma and
        Minghao Liu and
        Morry Niu and
        Noah Wang and
        Quehry Que and
        Ruibo Liu and
        Sine Liu and
        Shawn Guo and
        Soren Gao and
        Wangchunshu Zhou and
        Xinyue Zhang and
        Yizhi Zhou and
        Yubo Wang and
        Yuelin Bai and
        Yuhan Zhang and
        Yuxiang Zhang and
        Zenith Wang and
        Zhenzhu Yang and
        Zijian Zhao and
        Jiajun Zhang and
        Wanli Ouyang and
        Wenhao Huang and
        Wenhu Chen
    },
    year = {2024},
    journal = {arXiv preprint arXiv: 2405.19327}
}
"""

_HOMEPAGE = "https://huggingface.co/datasets/m-a-p/Matrix"


class MatrixDataset(datasets.GeneratorBasedBuilder):
    """Custom dataset for JSON files with filtering capabilities."""

    VERSION = datasets.Version("1.0.0")

    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features({
                "id": datasets.Value("string"),
                "text": datasets.Value("string"),
            }),
            homepage=_HOMEPAGE,
            citation=_CITATION,
        )
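
    # A hypothetical JSONL record matching the features above:
    #     {"id": "doc-00042", "text": "An example document."}
    # "id" may be missing, and the text may arrive under a "content" key
    # instead of "text"; _generate_examples below normalizes both cases.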

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        import random

        data_files = glob.glob("*/*.jsonl")
        data_shards = []
        for filepath in data_files:
            # max size of each shard is 1GB; the parentheses make this a
            # ceiling division (the unparenthesized -x // -y is just x // y,
            # which would assign zero shards to files smaller than 1GB)
            num_shards = -(os.path.getsize(filepath) // -1024**3)
            for i in range(num_shards):
                data_shards.append((filepath, i, num_shards))
        # deterministic shuffle so the training stream interleaves sources
        random.Random(42).shuffle(data_shards)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "data_shards": data_shards,
                },
            ),
        ]
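
    # Worked example of the shard arithmetic above (hypothetical sizes):
    # a 0.5 GB file gives -(size // -2**30) == 1 shard, a 2.5 GB file gives
    # 3 shards; shard k of a file then reads every num_shards-th line
    # starting at line k, so the shards partition the file's lines.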

    def _generate_examples(self, data_shards):
        for file, shard, num_shards in data_shards:
            with open(file, "r", encoding="utf-8") as f:
                # shard k of n reads lines k, k + n, k + 2n, ...
                for i, line in islice(enumerate(f), shard, None, num_shards):
                    data = orjson.loads(line)
                    if 'id' not in data:
                        data['id'] = f"{file}_{i}"
                    if 'content' in data and 'text' not in data:
                        data['text'] = data.pop('content')
                    # use .get so records lacking both 'text' and 'content'
                    # are skipped instead of raising KeyError
                    if data.get('text') is not None:
                        yield data["id"], data
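
For reference, the interleaved reading in _generate_examples means the num_shards shards of one file together yield every line exactly once. A minimal standalone sketch of the same islice pattern, using made-up data:

from itertools import islice

lines = [f"line {j}" for j in range(10)]
num_shards = 3
for shard in range(num_shards):
    # shard k takes lines k, k + num_shards, k + 2 * num_shards, ...
    picked = [i for i, _ in islice(enumerate(lines), shard, None, num_shards)]
    print(shard, picked)
# prints: 0 [0, 3, 6, 9] / 1 [1, 4, 7] / 2 [2, 5, 8]

While this script was in place, the dataset would typically have been loaded through it, e.g. with datasets.load_dataset("m-a-p/Matrix", trust_remote_code=True) (assuming a datasets version that still supports script-based loaders); after this deletion, loading falls back to whatever data files the repository exposes directly.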