Bug: datasets.builder.DatasetGenerationError: An error occurred while generating the dataset

#30 opened by michaelroyzen

I'm getting an error with datasets 2.13.1.

My code:

import os
import datasets as ds

MY_CACHE_DIR = "/home/ubuntu/the-stack-dedup-local"
MY_TOKEN = "my-token"

the_stack_ds = ds.load_dataset(
    "bigcode/the-stack-dedup",
    split="train",
    download_mode="reuse_cache_if_exists",
    cache_dir=MY_CACHE_DIR,
    use_auth_token=MY_TOKEN,
    num_proc=64,
)

The exception:

Generating train split: 233248251 examples [54:31, 57280.00 examples/s]
multiprocess.pool.RemoteTraceback:                                     
"""                                                                    
Traceback (most recent call last):
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/builder.py", line 1879, in _prepare_split_single
    for _, table in generator:
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/packaged_modules/parquet/parquet.py", line 82, in _generate_tables
    yield f"{file_idx}_{batch_idx}", self._cast_table(pa_table)
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/packaged_modules/parquet/parquet.py", line 61, in _cast_table
    pa_table = table_cast(pa_table, self.info.features.arrow_schema)
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/table.py", line 2324, in table_cast
    return cast_table_to_schema(table, schema)
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/table.py", line 2282, in cast_table_to_schema
    raise ValueError(f"Couldn't cast\n{table.schema}\nto\n{features}\nbecause column names don't match")
ValueError: Couldn't cast                                              
hexsha: string                                                         
size: int64                                                            
ext: string                                                            
lang: string                                                           
max_stars_repo_path: string                                            
max_stars_repo_name: string                                            
max_stars_repo_head_hexsha: string                                     
max_stars_repo_licenses: list<item: string>                            
  child 0, item: string                                                
max_stars_count: int64                                                 
max_stars_repo_stars_event_min_datetime: string                        
max_stars_repo_stars_event_max_datetime: string                        
max_issues_repo_path: string                                           
max_issues_repo_name: string                                           
max_issues_repo_head_hexsha: string                                    
max_issues_repo_licenses: list<item: string>                           
  child 0, item: string                                                
max_issues_count: int64                                                
max_issues_repo_issues_event_min_datetime: string                      
max_issues_repo_issues_event_max_datetime: string                      
max_forks_repo_path: string                                            
max_forks_repo_name: string                                            
max_forks_repo_head_hexsha: string                                     
max_forks_repo_licenses: list<item: string>                            
  child 0, item: string                                                
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: double
max_line_length: int64
alphanum_fraction: double
__id__: int64
-- schema metadata --
huggingface: '{"info": {"features": {"hexsha": {"dtype": "string", "_type' + 1979
to
{'hexsha': Value(dtype='string', id=None), 'size': Value(dtype='int64', id=None), 'ext': Value(dtype='string', id=None), 'lang': Value(dtype='string', id=None), 'max_stars_repo_path': Value(dtype='string', id=None), 'max_stars_repo_name': Value(dtype='string', id=None), 'max_stars_repo_head_hexsha': Value(dtype='string', id=None), 'max_stars_repo_licenses': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'max_stars_count': Value(dtype='int64', id=None), 'max_stars_repo_stars_event_min_datetime': Value(dtype='string', id=None), 'max_stars_repo_stars_event_max_datetime': Value(dtype='string', id=None), 'max_issues_repo_path': Value(dtype='string', id=None), 'max_issues_repo_name': Value(dtype='string', id=None), 'max_issues_repo_head_hexsha': Value(dtype='string', id=None), 'max_issues_repo_licenses': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'max_issues_count': Value(dtype='int64', id=None), 'max_issues_repo_issues_event_min_datetime': Value(dtype='string', id=None), 'max_issues_repo_issues_event_max_datetime': Value(dtype='string', id=None), 'max_forks_repo_path': Value(dtype='string', id=None), 'max_forks_repo_name': Value(dtype='string', id=None), 'max_forks_repo_head_hexsha': Value(dtype='string', id=None), 'max_forks_repo_licenses': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'max_forks_count': Value(dtype='int64', id=None), 'max_forks_repo_forks_event_min_datetime': Value(dtype='string', id=None), 'max_forks_repo_forks_event_max_datetime': Value(dtype='string', id=None), 'content': Value(dtype='string', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'alphanum_fraction': Value(dtype='float64', id=None)}
because column names don't match

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/ubuntu/.local/lib/python3.10/site-packages/multiprocess/p
ool.py", line 125, in worker
    result = (True, func(*args, **kwds))
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/utils
/py_utils.py", line 1328, in _write_generator_to_queue
    for i, result in enumerate(func(**kwargs)):
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/build
er.py", line 1912, in _prepare_split_single
    raise DatasetGenerationError("An error occurred while generating th
e dataset") from e
datasets.builder.DatasetGenerationError: An error occurred while genera
ting the dataset
"""

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/ubuntu/download_the_stack.py", line 7, in <module>
    the_stack_ds = ds.load_dataset("bigcode/the-stack-dedup", split="tr
ain", download_mode="reuse_cache_if_exists", cache_dir=MY_CACHE_DIR, us
e_auth_token=MY_TOKEN, num_proc=64) 
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/load.
py", line 1809, in load_dataset
    builder_instance.download_and_prepare(
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/build
er.py", line 909, in download_and_prepare
    self._download_and_prepare(
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/build
er.py", line 1004, in _download_and_prepare
    self._prepare_split(split_generator, **prepare_split_kwargs)
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/build
er.py", line 1796, in _prepare_split
    for job_id, done, content in iflatmap_unordered(
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/utils
/py_utils.py", line 1354, in iflatmap_unordered
    [async_result.get(timeout=0.05) for async_result in async_results]
  File "/home/ubuntu/.local/lib/python3.10/site-packages/datasets/utils
/py_utils.py", line 1354, in <listcomp>
    [async_result.get(timeout=0.05) for async_result in async_results]
  File "/home/ubuntu/.local/lib/python3.10/site-packages/multiprocess/p
ool.py", line 774, in get
    raise self._value
datasets.builder.DatasetGenerationError: An error occurred while generating the dataset
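Comparing the two schemas in the error, the on-disk parquet schema contains an extra __id__: int64 column that the declared features don't include, which looks like what triggers the cast failure. A minimal sketch to check a shard's on-disk columns with pyarrow (the shard path below is hypothetical):

import pyarrow.parquet as pq

# Hypothetical path to one of the downloaded shards in my cache dir
shard = "/home/ubuntu/the-stack-dedup-local/downloads/some-shard.parquet"

schema = pq.read_schema(shard)  # reads only the file footer, not the data
print(schema.names)             # expect the extra "__id__" column here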

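If that's the cause, one possible workaround (untested, and assuming the parquet shards are already downloaded under MY_CACHE_DIR) would be to load the files with the generic parquet builder, so the features are inferred from the files themselves instead of being cast to the schema from the dataset card:

import glob
import datasets as ds

# Hypothetical glob over the already-downloaded parquet shards
parquet_files = sorted(glob.glob("/home/ubuntu/the-stack-dedup-local/**/*.parquet", recursive=True))

# The "parquet" builder infers features from the files, so the extra
# __id__ column is kept instead of failing the cast
the_stack_ds = ds.load_dataset("parquet", data_files=parquet_files, split="train")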
@loubnabnl would love your input, please.

lhoestq changed discussion status to closed
