Upload nle_hf_dataset.py
nle_hf_dataset.py (+14 -17)
@@ -18,6 +18,7 @@ import h5py
 import json
 import os
 import datasets
+from glob import glob
 
 
 _CITATION = """\
@@ -38,8 +39,8 @@ _HOMEPAGE = ""
 _LICENSE = ""
 
 _URLS = {
-    "data": "https://huggingface.co/datasets/Howuhh/nle_hf_dataset/tree/main/data",
-    "metadata": "https://huggingface.co/datasets/Howuhh/nle_hf_dataset/tree/main/metadata",
+    "data": glob("https://huggingface.co/datasets/Howuhh/nle_hf_dataset/tree/main/data/*.json"),
+    "metadata": glob("https://huggingface.co/datasets/Howuhh/nle_hf_dataset/tree/main/metadata/*.hdf5"),
 }
 
 class NleHfDataset(datasets.GeneratorBasedBuilder):
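
Note on the hunk above: glob() matches paths on the local filesystem, so calling it on an https:// URL returns an empty list; /tree/main/ URLs also point at the web UI rather than raw files (raw downloads resolve under /resolve/main/). A minimal sketch of one alternative, using huggingface_hub to enumerate the repo's files (only the repo id comes from this diff; the variable names are illustrative, not part of the commit):

# Sketch, not part of this commit: enumerate repo files remotely instead
# of glob(), which only matches local paths.
from huggingface_hub import HfApi

api = HfApi()
files = api.list_repo_files("Howuhh/nle_hf_dataset", repo_type="dataset")
data_files = [f for f in files if f.startswith("data/")]
metadata_files = [f for f in files if f.startswith("metadata/")]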
@@ -50,7 +51,7 @@ class NleHfDataset(datasets.GeneratorBasedBuilder):
         datasets.BuilderConfig(name="data", version=VERSION, description="Data for all episodes"),
         datasets.BuilderConfig(name="metadata", version=VERSION, description="Metadata for all episodes"),
     ]
-    DEFAULT_CONFIG_NAME = "metadata"
+    DEFAULT_CONFIG_NAME = "metadata"
 
     def _info(self):
         if self.config.name == "metadata":
@@ -107,24 +108,20 @@ class NleHfDataset(datasets.GeneratorBasedBuilder):
 
     def _split_generators(self, dl_manager):
         urls = _URLS[self.config.name]
-
+        filepaths = [dl_manager.download(url) for url in urls]
         return [
             datasets.SplitGenerator(
-                name=datasets.Split.TRAIN,
-                gen_kwargs={"filepath": data_dir, "split": "train"},
-            )
+                name=datasets.Split.TRAIN, gen_kwargs={"filepaths": filepaths})
         ]
 
-    def _generate_examples(self,
-
-
-        with open(
+    def _generate_examples(self, filepaths):
+        for filepath in filepaths:
+            if self.config.name == "metadata":
+                with open(filepath, encoding="utf-8") as f:
                     data = json.loads(f.read())
-
-
-
-        for i in range(1, 5):
-            with h5py.File(os.path.join(filepath, f"{i}.hdf5"), "r") as f:
+                    yield i, data
+            else:
+                with h5py.File(filepath, "r") as f:
                     yield i, {
                         "tty_chars": f["tty_chars"][()],
                         "tty_colors": f["tty_colors"][()],
@@ -132,4 +129,4 @@ class NleHfDataset(datasets.GeneratorBasedBuilder):
                         "actions": f["actions"][()],
                         "rewards": f["rewards"][()],
                         "dones": f["dones"][()]
-            }
+                    }
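
Two observations on the rewritten methods above. First, dl_manager.download also accepts a list directly, so the list comprehension is equivalent to a single dl_manager.download(urls) call. Second, yield i, data and yield i, {...} still reference i, but the for i in range(1, 5) loop that defined it was removed, so the new code would raise a NameError. A hedged sketch of one way to key the examples, assuming the script's existing json/h5py imports and the same one-file-per-episode layout (this is an assumption, not the commit's code):

# Sketch: number examples with enumerate() instead of the removed
# range() loop variable.
def _generate_examples(self, filepaths):
    for i, filepath in enumerate(filepaths):
        if self.config.name == "metadata":
            with open(filepath, encoding="utf-8") as f:
                yield i, json.loads(f.read())
        else:
            with h5py.File(filepath, "r") as f:
                # Materialize every stored array (tty_chars, tty_colors,
                # actions, rewards, dones, ...) into the example dict.
                yield i, {key: f[key][()] for key in f.keys()}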
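
For reference, a usage sketch for the finished loader; the repo id and config names come from this diff, while the split choice and everything else here is assumed:

from datasets import load_dataset

# Sketch: load the lightweight per-episode metadata config.
metadata = load_dataset("Howuhh/nle_hf_dataset", "metadata", split="train")
print(metadata[0])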