# Source: nld-aa-taster / nld-aa-taster.py — Hugging Face dataset script by Howuhh
# (commit b9ef731, "update for streaming").
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Dungeons and Data: A Large-Scale NetHack Dataset. """
import glob
import h5py
import json
import os
import datasets
from datasets.download.streaming_download_manager import xopen
# BibTeX citation for the dataset; intentionally left empty for now.
_CITATION = """\
"""
# Short human-readable summary shown on the dataset hub page.
_DESCRIPTION = """\
3 billion state-action-score transitions from 100,000 trajectories collected from the symbolic bot winner of the NetHack Challenge 2021.
"""
# Homepage URL and license string; not filled in by the author.
_HOMEPAGE = ""
_LICENSE = ""
class NLEDataset(datasets.GeneratorBasedBuilder):
    """Dungeons and Data: A Large-Scale NetHack Dataset.

    Each example is one full NetHack episode: variable-length sequences of
    terminal frames (chars/colors/cursor), actions, rewards and done flags,
    paired with the episode's xlogfile-style metadata record.
    """

    VERSION = datasets.Version("1.0.0")
    DEFAULT_CONFIG_NAME = "mon-hum-neu"

    def _info(self):
        """Declare the feature schema for one episode (sequence data + metadata)."""
        features = datasets.Features(
            {
                "data": {
                    # First axis is the timestep; episodes have variable length,
                    # hence the leading None. 24x80 is the tty screen size.
                    "tty_chars": datasets.Array3D(shape=(None, 24, 80), dtype="uint8"),
                    "tty_colors": datasets.Array3D(shape=(None, 24, 80), dtype="int8"),
                    "tty_cursor": datasets.Array2D(shape=(None, 2), dtype="int16"),
                    "actions": datasets.Sequence(datasets.Value("int16")),
                    "rewards": datasets.Sequence(datasets.Value("int32")),
                    "dones": datasets.Sequence(datasets.Value("bool")),
                },
                # Per-episode record in NetHack xlogfile style.
                "metadata": {
                    "gameid": datasets.Value("int32"),
                    "version": datasets.Value("string"),
                    "points": datasets.Value("int32"),
                    "deathdnum": datasets.Value("int32"),
                    "deathlev": datasets.Value("int32"),
                    "maxlvl": datasets.Value("int32"),
                    "hp": datasets.Value("int32"),
                    "maxhp": datasets.Value("int32"),
                    "deaths": datasets.Value("int32"),
                    "deathdate": datasets.Value("int32"),
                    "birthdate": datasets.Value("int32"),
                    "uid": datasets.Value("int32"),
                    "role": datasets.Value("string"),
                    "race": datasets.Value("string"),
                    "gender": datasets.Value("string"),
                    "align": datasets.Value("string"),
                    "name": datasets.Value("string"),
                    "death": datasets.Value("string"),
                    "conduct": datasets.Value("string"),
                    "turns": datasets.Value("int32"),
                    "achieve": datasets.Value("string"),
                    "realtime": datasets.Value("int64"),
                    "starttime": datasets.Value("int64"),
                    "endtime": datasets.Value("int64"),
                    "gender0": datasets.Value("string"),
                    "align0": datasets.Value("string"),
                    "flags": datasets.Value("string"),
                }
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download (or resolve, in streaming mode) the hdf5 data file and its
        json metadata for the selected character config; single TRAIN split."""
        data_file = dl_manager.download(f"data/data-{self.config.name}-any.hdf5")
        metadata_file = dl_manager.download(f"data/metadata-{self.config.name}-any.json")
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "data_file": data_file,
                    "metadata_file": metadata_file,
                    "dl_manager": dl_manager,
                },
            )
        ]

    def _generate_examples(self, data_file, metadata_file, dl_manager):
        """Yield (index, example) pairs, one per episode.

        In streaming mode the hdf5 path is remote, so it is opened via
        ``xopen`` as a file-like object for h5py. The handle is closed in a
        ``finally`` block so it is not leaked if reading raises midway
        (the original closed it only after a clean run).
        """
        stream = xopen(data_file, "rb") if dl_manager.is_streaming else data_file
        try:
            with h5py.File(stream, "r") as df, xopen(metadata_file, "r") as f:
                # The metadata json is small, so load it fully into memory.
                meta = json.load(f)
                for i, (ep_key, ep_meta) in enumerate(zip(df["/"], meta)):
                    # Top-level hdf5 group names are game ids; they must line
                    # up 1:1 with the metadata records.
                    assert int(ep_key) == int(ep_meta["gameid"])
                    yield i, {
                        "data": {
                            "tty_chars": df[f"{ep_key}/tty_chars"][()],
                            "tty_colors": df[f"{ep_key}/tty_colors"][()],
                            "tty_cursor": df[f"{ep_key}/tty_cursor"][()],
                            "actions": df[f"{ep_key}/actions"][()],
                            "rewards": df[f"{ep_key}/rewards"][()],
                            "dones": df[f"{ep_key}/dones"][()],
                        },
                        "metadata": ep_meta,
                    }
        finally:
            if dl_manager.is_streaming:
                stream.close()