glenn-jocher committed on
Commit
63e09fd
·
unverified ·
1 Parent(s): 4e8c81a

Remove `encoding='ascii'` (#4413)

Browse files

* Remove `encoding='ascii'`

* Reinstate `encoding='ascii'` in emojis()

utils/autoanchor.py CHANGED
@@ -104,7 +104,7 @@ def kmean_anchors(dataset='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen
104
  return k
105
 
106
  if isinstance(dataset, str): # *.yaml file
107
- with open(dataset, encoding='ascii', errors='ignore') as f:
108
  data_dict = yaml.safe_load(f) # model dict
109
  from utils.datasets import LoadImagesAndLabels
110
  dataset = LoadImagesAndLabels(data_dict['train'], augment=True, rect=True)
 
104
  return k
105
 
106
  if isinstance(dataset, str): # *.yaml file
107
+ with open(dataset, errors='ignore') as f:
108
  data_dict = yaml.safe_load(f) # model dict
109
  from utils.datasets import LoadImagesAndLabels
110
  dataset = LoadImagesAndLabels(data_dict['train'], augment=True, rect=True)
utils/datasets.py CHANGED
@@ -931,7 +931,7 @@ def dataset_stats(path='coco128.yaml', autodownload=False, verbose=False, profil
931
  im.save(im_dir / Path(f).name, quality=75) # save
932
 
933
  zipped, data_dir, yaml_path = unzip(Path(path))
934
- with open(check_file(yaml_path), encoding='ascii', errors='ignore') as f:
935
  data = yaml.safe_load(f) # data dict
936
  if zipped:
937
  data['path'] = data_dir # TODO: should this be dir.resolve()?
 
931
  im.save(im_dir / Path(f).name, quality=75) # save
932
 
933
  zipped, data_dir, yaml_path = unzip(Path(path))
934
+ with open(check_file(yaml_path), errors='ignore') as f:
935
  data = yaml.safe_load(f) # data dict
936
  if zipped:
937
  data['path'] = data_dir # TODO: should this be dir.resolve()?
utils/general.py CHANGED
@@ -112,7 +112,7 @@ def is_pip():
112
 
113
  def emojis(str=''):
114
  # Return platform-dependent emoji-safe version of string
115
- return str.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else str
116
 
117
 
118
  def file_size(file):
@@ -250,7 +250,7 @@ def check_dataset(data, autodownload=True):
250
 
251
  # Read yaml (optional)
252
  if isinstance(data, (str, Path)):
253
- with open(data, encoding='ascii', errors='ignore') as f:
254
  data = yaml.safe_load(f) # dictionary
255
 
256
  # Parse yaml
 
112
 
113
  def emojis(str=''):
114
  # Return platform-dependent emoji-safe version of string
115
+ return str.encode().decode(encoding='ascii', errors='ignore') if platform.system() == 'Windows' else str
116
 
117
 
118
  def file_size(file):
 
250
 
251
  # Read yaml (optional)
252
  if isinstance(data, (str, Path)):
253
+ with open(data, errors='ignore') as f:
254
  data = yaml.safe_load(f) # dictionary
255
 
256
  # Parse yaml
utils/loggers/wandb/wandb_utils.py CHANGED
@@ -62,7 +62,7 @@ def check_wandb_resume(opt):
62
 
63
 
64
  def process_wandb_config_ddp_mode(opt):
65
- with open(check_file(opt.data), encoding='ascii', errors='ignore') as f:
66
  data_dict = yaml.safe_load(f) # data dict
67
  train_dir, val_dir = None, None
68
  if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX):
@@ -152,7 +152,7 @@ class WandbLogger():
152
  self.wandb_artifact_data_dict = self.check_and_upload_dataset(opt)
153
 
154
  elif opt.data.endswith('_wandb.yaml'): # When dataset is W&B artifact
155
- with open(opt.data, encoding='ascii', errors='ignore') as f:
156
  data_dict = yaml.safe_load(f)
157
  self.data_dict = data_dict
158
  else: # Local .yaml dataset file or .zip file
@@ -186,7 +186,7 @@ class WandbLogger():
186
  opt.single_cls,
187
  'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem)
188
  print("Created dataset config file ", config_path)
189
- with open(config_path, encoding='ascii', errors='ignore') as f:
190
  wandb_data_dict = yaml.safe_load(f)
191
  return wandb_data_dict
192
 
 
62
 
63
 
64
  def process_wandb_config_ddp_mode(opt):
65
+ with open(check_file(opt.data), errors='ignore') as f:
66
  data_dict = yaml.safe_load(f) # data dict
67
  train_dir, val_dir = None, None
68
  if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX):
 
152
  self.wandb_artifact_data_dict = self.check_and_upload_dataset(opt)
153
 
154
  elif opt.data.endswith('_wandb.yaml'): # When dataset is W&B artifact
155
+ with open(opt.data, errors='ignore') as f:
156
  data_dict = yaml.safe_load(f)
157
  self.data_dict = data_dict
158
  else: # Local .yaml dataset file or .zip file
 
186
  opt.single_cls,
187
  'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem)
188
  print("Created dataset config file ", config_path)
189
+ with open(config_path, errors='ignore') as f:
190
  wandb_data_dict = yaml.safe_load(f)
191
  return wandb_data_dict
192