Gabriel Kressin Palacios committed
Commit 183c5df
Parent: 3ecf042

zipped data

.gitattributes CHANGED
@@ -37,3 +37,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.wav filter=lfs diff=lfs merge=lfs -text
 wikitext103/ filter=lfs diff=lfs merge=lfs -text
 wikitext2 filter=lfs diff=lfs merge=lfs -text
+wikitext103.zip filter=lfs diff=lfs merge=lfs -text
+wikitext2.zip filter=lfs diff=lfs merge=lfs -text
wikitext103/train.parquet → wikitext103.zip RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5f73231cf4fbf2ce77a7762dc8662656baaecebbe06ef0a9eaf60ac0c5646d9c
-size 1313130334
+oid sha256:1f612dc1711160c9c65ee7d4fabd51bfce94ab972761195b4641a466d31e2e92
+size 1106715044
wikitext103/validation.parquet DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:08199a4d18e9ce663b52948d54f64f6e65207471a1d2c7ac917a1c26642c0b9e
-size 2755917
wikitext103/test.parquet → wikitext2.zip RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:791c364b0c84bfff37738a8a12c6c08374d354cc2dc19d8c2fb540bcd9690143
-size 3121651
+oid sha256:9887782ff6dad83530d7bb4b4d4b120f4c9c08749ecc97800fa38772f2748b2f
+size 27293137
wikitext2/test.parquet DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4674d94eff80195b563cb9a2a1bc97aa25ef7a6b500bf2257036c14bdd0ae306
-size 3127331
wikitext2/train.parquet DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:722ecc0b545203de71e6891b3b8b92f1f1cec301b95f68380adef222a5da3777
-size 26645424
wikitext2/validation.parquet DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f48f826bfe84b977f6d983f37ee08fab0dce48c3ec187387836661b242e807ff
-size 2763767
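The per-split parquet files above are removed in favor of the two zip archives. The commit does not show how the archives were built; the following is only a minimal sketch of one way to produce archives with the layout the loading script expects (the split parquet files at the archive root). `zip_splits` is a hypothetical helper, not part of the repository:

```python
import zipfile
from pathlib import Path

def zip_splits(data_dir: str, archive: str) -> None:
    # Hypothetical helper: bundle train/validation/test.parquet from one
    # config directory into a single zip, keeping the files at the archive
    # root so that dl_manager.download_and_extract() exposes them directly.
    with zipfile.ZipFile(archive, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        for split in ("train", "validation", "test"):
            path = Path(data_dir) / f"{split}.parquet"
            zf.write(path, arcname=path.name)

# e.g. zip_splits("wikitext2", "wikitext2.zip")
#      zip_splits("wikitext103", "wikitext103.zip")
```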
 
 
 
 
wikitext_linked.py CHANGED
@@ -82,6 +82,8 @@ FEATURES = datasets.Features(
     }
 )
 
+_URL = "https://huggingface.co/datasets/gabrielkp/wikitext_linked/raw/main/"
+
 
 class WikitextLinked(datasets.ArrowBasedBuilder):
     """wikitext_linked is an annotated and linked version from wikitext. Wikitext is a
@@ -96,13 +98,13 @@ class WikitextLinked(datasets.ArrowBasedBuilder):
             name="wikitext2",
             version=VERSION,
             description="The small version",
-            data_dir="wikitext2",
+            data_dir="wikitext2.zip",
         ),
         datasets.BuilderConfig(
             name="wikitext103",
             version=VERSION,
             description="The big version",
-            data_dir="wikitext103",
+            data_dir="wikitext103.zip",
         ),
     ]
 
@@ -117,26 +119,27 @@
         )
 
     def _split_generators(self, dl_manager):
+        data_dir = dl_manager.download_and_extract(f"{_URL}{self.config.data_dir}")
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
-                    "filepath": os.path.join(self.config.data_dir, "train.parquet"),
+                    "filepath": os.path.join(data_dir, "train.parquet"),
                 },
             ),
             datasets.SplitGenerator(
                 name=datasets.Split.VALIDATION,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
-                    "filepath": os.path.join(self.config.data_dir, "validation.parquet"),
+                    "filepath": os.path.join(data_dir, "validation.parquet"),
                 },
             ),
             datasets.SplitGenerator(
                 name=datasets.Split.TEST,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
-                    "filepath": os.path.join(self.config.data_dir, "test.parquet"),
+                    "filepath": os.path.join(data_dir, "test.parquet"),
                 },
             ),
         ]
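The diff does not include `_generate_tables`, but since the builder subclasses `datasets.ArrowBasedBuilder` and passes a parquet `filepath` to each split, a minimal sketch of how such a method typically consumes the extracted file could look like this (an assumption for illustration, not the script's actual implementation):

```python
import pyarrow.parquet as pq

def _generate_tables(self, filepath):
    # ArrowBasedBuilder expects (key, pyarrow.Table) pairs; here the whole
    # parquet split is read and yielded as a single table.
    yield 0, pq.read_table(filepath)
```

With this change, `datasets.load_dataset("gabrielkp/wikitext_linked", "wikitext2")` would make `_split_generators` download and extract `wikitext2.zip` from `_URL` instead of reading parquet files from a local `wikitext2/` directory.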