# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This dataset contains interior images created using DALL-E.
The dataset contains 512x512 images split into 5 classes:
* bathroom: 1000 images
* bedroom: 1000 images
* dining_room: 1000 images
* kitchen: 1000 images
* living_room: 1000 images
"""

from pathlib import Path
from typing import Iterator, List

import datasets
from datasets.download.download_manager import DownloadManager
from datasets.tasks import ImageClassification

_ALLOWED_IMG_EXT = {".png", ".jpg"}

_CITATION = """\
@InProceedings{huggingface:dataset,
title = {Computer Generated interior images},
author={Padilla, Rafael},
year={2023}
}
"""

_DESCRIPTION = """\
This new dataset contains CG interior images representing interior of houses in 5 classes, with \
    1000 images per class.
"""

_HOMEPAGE = "https://huggingface.co/datasets/rafaelpadilla/interior-cgi/"

_LICENSE = ""

_URLS = {
    "test": "https://huggingface.co/datasets/rafaelpadilla/interior-cgi/resolve/main/data/test.zip",
    "train": "https://huggingface.co/datasets/rafaelpadilla/interior-cgi/resolve/main/data/train.zip",
}

_NAMES = ["bathroom", "bedroom", "dining_room", "kitchen", "living_room"]


class CGInteriorDataset(datasets.GeneratorBasedBuilder):
    """CGInterior: Computer Generated Interior images dataset"""

    VERSION = datasets.Version("1.1.0")

    def _info(self) -> datasets.DatasetInfo:
        """
        Returns the dataset metadata and features.

        Returns:
            DatasetInfo: Metadata and features of the dataset.
        """
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "image": datasets.Image(),
                    "label_id": datasets.features.ClassLabel(names=_NAMES),
                    "label_name": datasets.Value("string"),
                }
            ),
            supervised_keys=("image", "label_id"),
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            task_templates=[
                ImageClassification(image_column="image", label_column="label_id")
            ],
        )

    def _split_generators(
        self, dl_manager: DownloadManager
    ) -> List[datasets.SplitGenerator]:
        """
        Provides the split information and downloads the data.

        Args:
            dl_manager (DownloadManager): The DownloadManager to use for downloading and extracting data.

        Returns:
            List[SplitGenerator]: List of SplitGenerator objects representing the data splits.
        """
        data_files = dl_manager.download_and_extract(_URLS)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "files": dl_manager.iter_files(data_files["train"]),
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={
                    "files": dl_manager.iter_files(data_files["test"]),
                },
            ),
        ]

    def _generate_examples(self, files: List[str]) -> Iterator:
        """
        Generates examples for the dataset.

        Args:
            files (List[str]): List of image paths.

        Yields:
            Tuple[int, Dict[str, str]]: The example index and a dict mapping
                "image" to the image path and "label_id"/"label_name" to the
                class (parent folder) name.
        """
        for idx, img_path in enumerate(files):
            path = Path(img_path)
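            # Keep only supported image files; the class label is inferred from
            # the parent directory name (e.g. ".../bedroom/img.png" -> "bedroom").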
            if path.suffix in _ALLOWED_IMG_EXT:
                yield idx, {
                    "image": img_path,
                    "label_id": path.parent.name,
                    "label_name": path.parent.name,
                }
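

# Expected layout of the extracted archives (a sketch inferred from the label
# logic above; the exact file names are illustrative):
#
#     train/
#         bathroom/000001.png
#         bedroom/000002.jpg
#         ...
#     test/
#         living_room/000003.png
#         ...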