mict-zhaw committed
Commit 47d0df3
1 Parent(s): 4e7dd25

Use json instead of jsonl

Files changed (1):
    chall.py +14 -14
chall.py CHANGED

@@ -1,7 +1,6 @@
 import json
 import os
 from typing import Union, List
-
 from datasets import DatasetInfo, BuilderConfig, GeneratorBasedBuilder, Version, Features, Value, Audio, SplitGenerator, Split, logging
 from datasets.features import Sequence
 import soundfile as sf
@@ -13,8 +12,7 @@ _SAMPLE_RATE = 16000
 _DESCRIPTION = "tbd"
 _CITATION = "tbd"
 
-_META_FILE = "chall_data.jsonl"
-
+_METAFILE = "chall_metadata.json"
 
 logger = logging.get_logger(__name__)
 
@@ -204,15 +202,18 @@ class Chall(GeneratorBasedBuilder):
         return [
             SplitGenerator(
                 name=Split.TRAIN,
-                gen_kwargs={"filepath": os.path.join(data_dir, "data"), "metafile": os.path.join(data_dir, _META_FILE)},
+                gen_kwargs={
+                    "filepath": os.path.join(data_dir, "data"),
+                    "metafile": os.path.join(data_dir, _METAFILE)
+                },
             ),
             # datasets.SplitGenerator(
             #     name=datasets.Split.TEST,
-            #     gen_kwargs={"filepath": os.path.join(data_dir, "data"), "metafile": os.path.join(data_dir, _META_FILE)},
+            #     gen_kwargs={"filepath": os.path.join(data_dir, "data"), "metafile": os.path.join(data_dir, _METAFILE)},
             # ),
             # datasets.SplitGenerator(
             #     name=datasets.Split.VALIDATION,
-            #     gen_kwargs={"filepath": os.path.join(data_dir, "data"), "metafile": os.path.join(data_dir, _META_FILE)},
+            #     gen_kwargs={"filepath": os.path.join(data_dir, "data"), "metafile": os.path.join(data_dir, _METAFILE)},
             # ),
         ]
 
@@ -227,21 +228,20 @@ class Chall(GeneratorBasedBuilder):
         logger.info("generating examples from = %s", filepath)
 
         with open(metafile, 'r') as file:
-            for line in file:
-                data = json.loads(line)
-
+            metadata = json.load(file)
+            for row in metadata["data"]:
                 # load transcript
-                transcript_file = os.path.join(filepath, data["transcript_file"])
+                transcript_file = os.path.join(filepath, row["transcript_file"])
                 with open(transcript_file, 'r') as transcript:
                     transcript = json.load(transcript)
 
-                audio_id = data['audio_id']
-                audio_file_path = os.path.join(filepath, data["audio_file"])
+                audio_id = row['audio_id']
+                audio_file_path = os.path.join(filepath, row["audio_file"])
 
                 if self.config.split_segments:
-                    yield from self._generate_utterance_examples(audio_id, str(audio_file_path), data, transcript)
+                    yield from self._generate_utterance_examples(audio_id, str(audio_file_path), row, transcript)
                 else:
-                    yield from self._generate_transcript_examples(audio_id, str(audio_file_path), data, transcript)
+                    yield from self._generate_transcript_examples(audio_id, str(audio_file_path), row, transcript)
 
     @staticmethod
     def _generate_transcript_examples(audio_id: str, audio_file_path: str, data: dict, transcript: dict):
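
Note on the format change: after this commit the loader reads chall_metadata.json as a single JSON object and iterates over its "data" list, whereas the old chall_data.jsonl held one JSON object per line. The sketch below is a minimal, hypothetical migration helper inferred from the diff, not part of the repository; only the per-row keys it relies on ("audio_id", "audio_file", "transcript_file") and the top-level "data" key are taken from the code above, and the function name convert_jsonl_to_json is an assumption.

import json

def convert_jsonl_to_json(jsonl_path: str = "chall_data.jsonl",
                          json_path: str = "chall_metadata.json") -> None:
    # Old layout (assumed from the removed code): one JSON object per line,
    # each describing a recording with keys such as "audio_id", "audio_file",
    # and "transcript_file".
    rows = []
    with open(jsonl_path, "r") as src:
        for line in src:
            line = line.strip()
            if line:  # skip blank lines
                rows.append(json.loads(line))
    # New layout (assumed from the added code): a single object whose "data"
    # list holds the same rows, matching metadata["data"] in _generate_examples.
    with open(json_path, "w") as dst:
        json.dump({"data": rows}, dst, indent=2)

if __name__ == "__main__":
    convert_jsonl_to_json()

Because each row keeps the same per-recording schema, _generate_utterance_examples and _generate_transcript_examples continue to receive one metadata dict per recording; only the way the rows are read from disk changes.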