# Example: download, extract, and inspect the transporter_networks dataset from the Hugging Face Hub.
import os
import tarfile
import tensorflow_datasets as tfds
from huggingface_hub import hf_hub_download
# Filesystem layout: the archive is downloaded into DATA_DIR and the
# extracted dataset directory lives alongside it.
DATA_DIR = "/home/robot"
FILENAME = "data.tar.xz"
EXTRACTED_FILENAME = "data"

# Full paths derived from the constants above.
FILEPATH = os.path.join(DATA_DIR, FILENAME)
EXTRACTED_FILEPATH = os.path.join(DATA_DIR, EXTRACTED_FILENAME)
# Download the compressed dataset archive from the Hugging Face Hub into
# DATA_DIR (skips the download if the file is already cached locally).
hf_hub_download(
    repo_id="peterdavidfagan/transporter_networks",
    repo_type="dataset",
    filename=FILENAME,
    local_dir=DATA_DIR,
)

# Uncompress the archive. The archive comes from a remote source, so reject
# any member whose resolved path would escape DATA_DIR (tar path-traversal
# hardening recommended by the `tarfile` documentation).
with tarfile.open(FILEPATH, 'r:xz') as tar:
    _base = os.path.realpath(DATA_DIR)
    for _member in tar.getmembers():
        _target = os.path.realpath(os.path.join(DATA_DIR, _member.name))
        if os.path.commonpath([_base, _target]) != _base:
            raise RuntimeError(f"unsafe path in archive: {_member.name!r}")
    tar.extractall(path=DATA_DIR)

# Remove the archive once extracted to free disk space.
os.remove(FILEPATH)
# Load the extracted dataset with TFDS and take the 'train' split.
ds = tfds.builder_from_directory(EXTRACTED_FILEPATH).as_dataset()['train']

# Basic inspection: dump the element structure, then walk every episode
# and print selected per-step fields.
print(ds.element_spec)
for episode in ds:
    print(episode["extrinsics"])
    for timestep in episode["steps"]:
        for field in ("is_first", "is_last", "is_terminal", "action"):
            print(timestep[field])