Fix AMP check tolerance (#7937)
Adjust the AMP check to a 5% absolute tolerance. This fixes the failing Colab AMP check on V100 (1.5% different) while keeping a ~200% safety margin.
utils/general.py (+3 −3)
@@ -520,10 +520,10 @@ def check_amp(model):
         LOGGER.warning(emojis(f'{prefix}checks skipped ⚠️, not online.'))
         return True
     m = AutoShape(model, verbose=False)  # model
-    a = m(im).
+    a = m(im).xywhn[0]  # FP32 inference
     m.amp = True
-    b = m(im).
-    if (a.shape == b.shape) and torch.allclose(a, b, atol=
+    b = m(im).xywhn[0]  # AMP inference
+    if (a.shape == b.shape) and torch.allclose(a, b, atol=0.05):  # close to 5% absolute tolerance
         LOGGER.info(emojis(f'{prefix}checks passed ✅'))
         return True
     else:
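For context, check_amp() enables AMP only when a model's FP32 and AMP predictions agree. Below is a minimal sketch of that pattern, assuming a plain tensor-in/tensor-out model on a CUDA device; amp_outputs_close and the direct torch.cuda.amp.autocast context are illustrative stand-ins, not the repo's API — the real check wraps the model in AutoShape, toggles m.amp, and compares normalized xywhn boxes.

import torch

def amp_outputs_close(model, im, atol=0.05):
    # Sketch of the check_amp() pattern: run the same input in FP32 and
    # under mixed precision, then require identical shapes and agreement
    # within an absolute tolerance (0.05 here, matching the new 5% threshold).
    model.eval()
    with torch.no_grad():
        a = model(im)  # FP32 inference
        with torch.cuda.amp.autocast():
            b = model(im)  # AMP inference
    # Cast b back to FP32: torch.allclose requires matching dtypes.
    return (a.shape == b.shape) and torch.allclose(a, b.float(), atol=atol)

An absolute tolerance reads as a percentage only because the compared xywhn boxes are normalized to [0, 1]; against the 1.5% discrepancy observed on the V100, atol=0.05 leaves roughly the 200% safety margin the description cites.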