glenn-jocher committed
Commit 9b4e054 · unverified · 1 parent: cf3fb58

Fix AMP check tolerance (#7937)


Adjusts the tolerance to 5%, fixing the failing Colab AMP check with a V100 (FP32 and AMP outputs differ by ~1.5%) while keeping a ~200% safety margin.
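For reference, `torch.allclose(a, b, atol=0.05)` passes when every element satisfies |a − b| ≤ atol + rtol·|b| (rtol defaults to 1e-5). Because `xywhn` coordinates are normalized to [0, 1], `atol=0.05` tolerates roughly 5% drift per value independent of image size, whereas the old pixel-space `xyxy` check with `atol=1.0` scaled with resolution. Since 5% is over three times the observed 1.5% V100 drift, the margin is about 200%. A toy sketch of the semantics (made-up tensors, not real model outputs):

```python
import torch

# Toy normalized xywhn rows standing in for FP32 vs AMP detections (hypothetical values);
# columns: x-center, y-center, width, height, all in [0, 1]
a = torch.tensor([[0.50, 0.40, 0.20, 0.10]])
b = a + 0.015  # simulate the ~1.5% FP32/AMP drift observed on the Colab V100

print(torch.allclose(a, b, atol=0.05))  # True: 1.5% drift sits inside the 5% tolerance
print(torch.allclose(a, b, atol=0.01))  # False: a tighter bound would reject this drift
```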

Files changed (1): utils/general.py (+3 −3)
utils/general.py CHANGED

@@ -520,10 +520,10 @@ def check_amp(model):
         LOGGER.warning(emojis(f'{prefix}checks skipped ⚠️, not online.'))
         return True
     m = AutoShape(model, verbose=False)  # model
-    a = m(im).xyxy[0]  # FP32 inference
+    a = m(im).xywhn[0]  # FP32 inference
     m.amp = True
-    b = m(im).xyxy[0]  # AMP inference
-    if (a.shape == b.shape) and torch.allclose(a, b, atol=1.0):  # close to 1.0 pixel bounding box
+    b = m(im).xywhn[0]  # AMP inference
+    if (a.shape == b.shape) and torch.allclose(a, b, atol=0.05):  # close to 5% absolute tolerance
         LOGGER.info(emojis(f'{prefix}checks passed ✅'))
         return True
     else:
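The repo's check runs through its AutoShape wrapper, which isn't reproducible standalone here. As a rough illustration only, the same FP32-vs-AMP comparison pattern can be sketched with a plain module and `torch.cuda.amp.autocast`; the `check_amp_generic` helper below is hypothetical and assumes a CUDA model that returns a single tensor of normalized values:

```python
import torch

def check_amp_generic(model, im, atol=0.05):
    # Hypothetical helper mirroring the check above with a generic nn.Module:
    # run the same input in FP32 and under autocast, then compare outputs.
    model.eval()
    with torch.inference_mode():
        a = model(im)  # FP32 inference
        with torch.cuda.amp.autocast(True):
            b = model(im)  # AMP inference (may come back as float16)
    # Cast both to float32 so allclose compares like dtypes
    return a.shape == b.shape and torch.allclose(a.float(), b.float(), atol=atol)
```

As in the patch, a 5% absolute tolerance only makes sense when the compared outputs are normalized; for pixel-space outputs the tolerance would have to scale with image size.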