mickylan2367 committed on
Commit
b079785
·
1 Parent(s): 21bd836

change loading script

Browse files
Files changed (1) hide show
  1. spectrogram_musicCaps.py +71 -40
spectrogram_musicCaps.py CHANGED
@@ -1,8 +1,16 @@
1
 
2
  import datasets
 
 
 
 
 
 
3
 
4
  # ここに設定を記入
5
  _NAME = "mickylan2367/spectrogram_musicCaps"
 
 
6
 
7
  # _HOMEPAGE = "https://github.com/fastai/imagenette"
8
  # プログラムを置く場所が決まったら、ここにホームページURLつける
@@ -17,11 +25,17 @@ Using for Project Learning...
17
  _IMAGES_DIR = "mickylan2367/images/data/"
18
  # _REPO = "https://huggingface.co/datasets/frgfm/imagenette/resolve/main/metadata"
19
 
 
 
 
 
 
 
20
 
21
  class spectrogram_musicCapsConfig(datasets.BuilderConfig):
22
  """Builder Config for spectrogram_MusicCaps"""
23
 
24
- def __init__(self, data_url, metadata_urls, **kwargs):
25
  """BuilderConfig
26
  Args:
27
  data_url: `string`, url to download the zip file from.
@@ -29,10 +43,6 @@ class spectrogram_musicCapsConfig(datasets.BuilderConfig):
29
  **kwargs: keyword arguments forwarded to super.
30
  """
31
  super(spectrogram_musicCapsConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
32
- self.data_url = data_url
33
- self.metadata_urls = metadata_urls
34
- print(data_url)
35
- print(metadata_urls)
36
 
37
 
38
  class spectrogram_musicCaps(datasets.GeneratorBasedBuilder):
@@ -42,20 +52,15 @@ class spectrogram_musicCaps(datasets.GeneratorBasedBuilder):
42
  spectrogram_musicCapsConfig(
43
  name="MusicCaps data 0_10",
44
  description="Datasets from MusicCaps by Mikan",
45
- data_url="https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/data0_10.zip",
46
- metadata_urls={
47
- "train": "https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/metadata0_10.jsonl",
48
- # "validation": "huggingface?"
49
- },
50
  ),
 
51
  spectrogram_musicCapsConfig(
52
  name="MusicCpas data 10_100",
53
  description="Datasets second action by Mikan",
54
- data_url="https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/data10_200.zip",
55
- metadata_urls={
56
- "train": "https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/metadata10_200.jsonl",
57
- # "validation": "https://link-to-dinner-foods-validation.txt"
58
- },
59
  )
60
  ]
61
 
@@ -75,30 +80,56 @@ class spectrogram_musicCaps(datasets.GeneratorBasedBuilder):
75
  # task_templates=[ImageClassification(image_column="image", label_column="label")],
76
  )
77
 
78
- def _split_generators(self, dl_manager):
79
- archive_path = dl_manager.download(self.config.data_url)
80
- split_metadata_paths = dl_manager.download(self.config.metadata_urls)
81
- return [
82
- datasets.SplitGenerator(
83
- name=datasets.Split.TRAIN,
84
- gen_kwargs={
85
- "images": dl_manager.iter_archive(archive_path),
86
- "metadata_path": split_metadata_paths["train"],
87
- }
88
- )
89
- ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
90
 
91
  def _generate_examples(self, images, metadata_path):
92
- print(images)
93
- """Generate images and captions for splits."""
94
- # with open(metadata_path, encoding="utf-8") as f:
95
- # files_to_keep = set(f.read().split("\n"))
96
- with open(metadata_path, encoding="utf-8") as f:
97
- # テキストファイルの方を入れる..
98
- captions = f.readlines()
99
-
100
- for idx, (file_path, file_obj) in enumerate(zip(images)):
101
- yield file_path,{
102
- "image": file_obj.read(),
103
- "caption":captions[idx]
104
- }
 
1
 
2
  import datasets
3
+ from huggingface_hub import HfApi
4
+ from datasets import DownloadManager, DatasetInfo
5
+ from datasets.data_files import DataFilesDict
6
+ import os
7
+ import json
8
+
9
 
10
  # ここに設定を記入
11
  _NAME = "mickylan2367/spectrogram_musicCaps"
12
+ _EXTENSION = [".png"]
13
+ _REVISION = "main"
14
 
15
  # _HOMEPAGE = "https://github.com/fastai/imagenette"
16
  # プログラムを置く場所が決まったら、ここにホームページURLつける
 
25
  _IMAGES_DIR = "mickylan2367/images/data/"
26
  # _REPO = "https://huggingface.co/datasets/frgfm/imagenette/resolve/main/metadata"
27
 
28
+ # 参考になりそうなURL集
29
+ # https://huggingface.co/docs/datasets/v1.1.1/_modules/datasets/utils/download_manager.html
30
+ # https://huggingface.co/datasets/animelover/danbooru2022/blob/main/danbooru2022.py
31
+ # https://huggingface.co/datasets/food101/blob/main/food101.py
32
+ # https://huggingface.co/docs/datasets/about_dataset_load
33
+
34
 
35
class spectrogram_musicCapsConfig(datasets.BuilderConfig):
    """BuilderConfig for spectrogram_MusicCaps."""

    def __init__(self, metadata_url=None, data_url=None, **kwargs):
        """BuilderConfig.

        Args:
            metadata_url: dict mapping split name (e.g. "train") to the URL of
                the JSONL metadata file for that split. Read later via
                ``self.config.metadata_url`` in ``_split_generators``.
            data_url: optional `string`, URL of the zip archive with the images.
            **kwargs: keyword arguments forwarded to super
                (name, description, ...).
        """
        # Intercept the custom kwargs here: datasets.BuilderConfig does not
        # accept `metadata_url`/`data_url`, so forwarding them through
        # **kwargs would raise TypeError.
        super(spectrogram_musicCapsConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
        self.metadata_url = metadata_url
        self.data_url = data_url
 
 
 
 
46
 
47
 
48
  class spectrogram_musicCaps(datasets.GeneratorBasedBuilder):
 
52
  spectrogram_musicCapsConfig(
53
  name="MusicCaps data 0_10",
54
  description="Datasets from MusicCaps by Mikan",
55
+ # data_url="https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/data0_10.zip",
56
+ metadata_url="train": "https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/metadata0_10.jsonl"
 
 
 
57
  ),
58
+
59
  spectrogram_musicCapsConfig(
60
  name="MusicCpas data 10_100",
61
  description="Datasets second action by Mikan",
62
+ # data_url="https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/data10_200.zip",
63
+ metadata_url = "train": "https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/blob/main/data/metadata10_200.jsonl"
 
 
 
64
  )
65
  ]
66
 
 
80
  # task_templates=[ImageClassification(image_column="image", label_column="label")],
81
  )
82
 
83
+ # def _split_generators(self, dl_manager):
84
+ # archive_path = dl_manager.download(self.config.data_url)
85
+ # split_metadata_paths = dl_manager.download(self.config.metadata_urls)
86
+ # return [
87
+ # datasets.SplitGenerator(
88
+ # name=datasets.Split.TRAIN,
89
+ # gen_kwargs={
90
+ # "images": dl_manager.iter_archive(archive_path),
91
+ # "metadata_path": split_metadata_paths["train"],
92
+ # }
93
+ # )
94
+ # ]
95
+
96
def _split_generators(self, dl_manager: DownloadManager):
    """Return SplitGenerators built from the zip archives found in this
    dataset's repository on the Hugging Face Hub.

    Args:
        dl_manager: datasets DownloadManager used to fetch and extract files.

    Returns:
        list of datasets.SplitGenerator, one per entry of the discovered
        data-files dict (currently only TRAIN is requested).
    """
    # Fetch the repo's file listing from the Hugging Face Hub.
    hfh_dataset_info = HfApi().dataset_info(_NAME, revision=_REVISION, timeout=100.0)
    # archive_path = dl_manager.download(self.config.data_url)
    # NOTE(review): assumes self.config.metadata_url is a dict with a "train"
    # key (it is indexed as split_metadata_paths["train"] below) — confirm the
    # BuilderConfig actually stores it in that shape.
    split_metadata_paths = dl_manager.download(self.config.metadata_url)

    # Collect the archive file names of the repo as a DataFilesDict,
    # restricted to zip files, all mapped to the TRAIN split.
    data_files = DataFilesDict.from_hf_repo(
        {datasets.Split.TRAIN: ["**"]},
        dataset_info=hfh_dataset_info,
        allowed_extensions=["zip", ".zip"],
    )

    gs = []
    for split, files in data_files.items():
        # Extract the zip files; yields one path per downloaded archive.
        downloaded_files = dl_manager.download_and_extract(files)
        # NOTE(review): downloaded_files are extracted-archive root paths, not
        # individual image files, yet _generate_examples indexes images[idx]
        # per metadata line — these only line up if each archive holds exactly
        # one image. Verify against the repo layout.
        # NOTE(review): metadata_path is hard-coded to the "train" entry even
        # though `split` is iterated — revisit if more splits are added.
        gs.append(
            datasets.SplitGenerator(
                name = split,
                gen_kwargs={
                    "images" : downloaded_files,
                    "metadata_path": split_metadata_paths["train"]
                }
            )
        )
    return gs
123
 
124
  def _generate_examples(self, images, metadata_path):
125
+ """Generate images and captions for splits."""
126
+ # with open(metadata_path, encoding="utf-8") as f:
127
+ # files_to_keep = set(f.read().split("\n"))
128
+ with open(metadata_path) as fin:
129
+ for idx, line in enumerate(fin):
130
+ data = json.loads(line)
131
+ # file_path = os.path.join(data["file_name"])
132
+ yield data["file_name"], {
133
+ "image": images[idx],
134
+ "caption":data["caption"]
135
+ }