import json
from pathlib import Path

import datasets

_HOMEPAGE = 'https://cocodataset.org/'
_LICENSE = 'Creative Commons Attribution 4.0 License'
_DESCRIPTION = 'COCO is a large-scale object detection, segmentation, and captioning dataset.'
_CITATION = '''\
@article{cocodataset,
    author = {Tsung{-}Yi Lin and Michael Maire and Serge J. Belongie and Lubomir D. Bourdev and Ross B. Girshick and James Hays and Pietro Perona and Deva Ramanan and Piotr Doll{\'{a}}r and C. Lawrence Zitnick},
    title = {Microsoft {COCO:} Common Objects in Context},
    journal = {CoRR},
    volume = {abs/1405.0312},
    year = {2014},
    url = {http://arxiv.org/abs/1405.0312},
    archivePrefix = {arXiv},
    eprint = {1405.0312},
    timestamp = {Mon, 13 Aug 2018 16:48:13 +0200},
    biburl = {https://dblp.org/rec/bib/journals/corr/LinMBHPRDZ14},
    bibsource = {dblp computer science bibliography, https://dblp.org}
}
'''
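
# Label names for the `categories` ClassLabel feature below: the COCO-Stuff
# "stuff" classes plus a catch-all 'other' entry.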
_NAMES = [
    'banner', 'blanket', 'branch', 'bridge', 'building-other', 'bush',
    'cabinet', 'cage', 'cardboard', 'carpet', 'ceiling-other', 'ceiling-tile',
    'cloth', 'clothes', 'clouds', 'counter', 'cupboard', 'curtain',
    'desk-stuff', 'dirt', 'door-stuff', 'fence', 'floor-marble',
    'floor-other', 'floor-stone', 'floor-tile', 'floor-wood', 'flower', 'fog',
    'food-other', 'fruit', 'furniture-other', 'grass', 'gravel', 'ground-other',
    'hill', 'house', 'leaves', 'light', 'mat', 'metal', 'mirror-stuff',
    'moss', 'mountain', 'mud', 'napkin', 'net', 'paper', 'pavement',
    'pillow', 'plant-other', 'plastic', 'platform', 'playingfield',
    'railing', 'railroad', 'river', 'road', 'rock', 'roof', 'rug', 'salad',
    'sand', 'sea', 'shelf', 'sky-other', 'skyscraper', 'snow', 'solid-other',
    'stairs', 'stone', 'straw', 'structural-other', 'table', 'tent',
    'textile-other', 'towel', 'tree', 'vegetable', 'wall-brick', 'wall-concrete',
    'wall-other', 'wall-panel', 'wall-stone', 'wall-tile', 'wall-wood',
    'water-other', 'waterdrops', 'window-blind', 'window-other', 'wood',
    'other'
]


class COCOStuffConfig(datasets.BuilderConfig):
    '''BuilderConfig for COCO-Stuff, built on the COCO 2017 images.'''

    def __init__(
        self, description, homepage,
        annotation_urls, **kwargs
    ):
        super().__init__(
            version=datasets.Version('1.0.0', ''),
            **kwargs
        )
        self.description = description
        self.homepage = homepage
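        # Images come from the official COCO image server; the stuff
        # annotation archives are taken from `annotation_urls`.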
        url = 'http://images.cocodataset.org/zips/'
        self.train_image_url = url + 'train2017.zip'
        self.val_image_url = url + 'val2017.zip'
        self.train_annotation_urls = annotation_urls['train']
        self.val_annotation_urls = annotation_urls['validation']


class COCOStuff(datasets.GeneratorBasedBuilder):
    BUILDER_CONFIGS = [
        COCOStuffConfig(
            description=_DESCRIPTION,
            homepage=_HOMEPAGE,
            annotation_urls={
                'train': 'data/stuff_train.zip',
                'validation': 'data/stuff_validation.zip'
            },
        )
    ]

    def _info(self):
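        # Each example holds an RGB image, the stuff categories present in
        # it, and the corresponding segmentation masks in COCO run-length
        # encoding (a length-2 'size' plus a 'counts' string).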
        features = datasets.Features({
            'image': datasets.Image(mode='RGB', decode=True, id=None),
            'categories': datasets.Sequence(
                feature=datasets.ClassLabel(names=_NAMES),
                length=-1, id=None
            ),
            'sem.rles': datasets.Sequence(
                feature={
                    'size': datasets.Sequence(
                        feature=datasets.Value(dtype='int32', id=None),
                        length=2, id=None
                    ),
                    'counts': datasets.Value(dtype='string', id=None)
                },
                length=-1, id=None
            ),
        })
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION
        )

    def _split_generators(self, dl_manager):
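        # Download and extract the COCO 2017 image zips and the stuff
        # annotation archives; `download_and_extract` returns the local
        # path of each extracted archive.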
        train_image_path = dl_manager.download_and_extract(
            self.config.train_image_url
        )
        val_image_path = dl_manager.download_and_extract(
            self.config.val_image_url
        )
        train_annotation_paths = dl_manager.download_and_extract(
            self.config.train_annotation_urls
        )
        val_annotation_paths = dl_manager.download_and_extract(
            self.config.val_annotation_urls
        )
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    'image_path': f'{train_image_path}/train2017',
                    'annotation_path': f'{train_annotation_paths}/stuff_train.jsonl'
                }
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    'image_path': f'{val_image_path}/val2017',
                    'annotation_path': f'{val_annotation_paths}/stuff_validation.jsonl'
                }
            )
        ]

    def _generate_examples(self, image_path, annotation_path):
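        # The annotation file is JSON Lines: one object per image, holding
        # the image file name plus its 'categories' and 'sem.rles' fields.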
        idx = 0
        image_path = Path(image_path)
        with open(annotation_path, 'r', encoding='utf-8') as f:
            for line in f:
                obj = json.loads(line.strip())
                example = {
                    'image': str(image_path / obj['image']),
                    'categories': obj['categories'],
                    'sem.rles': obj['sem.rles']
                }
                yield idx, example
                idx += 1
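

# Usage sketch (assumes this script is saved as `coco_stuff.py` next to the
# `data/` directory holding the stuff annotation archives; recent `datasets`
# versions may also require `trust_remote_code=True`):
#
#     from datasets import load_dataset
#
#     ds = load_dataset('coco_stuff.py')
#     print(ds['train'][0]['categories'])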