Skip to content

Commit 9b4e054

Browse files
authored
Fix AMP check tolerance (#7937)
Adjusts the tolerance to 5% absolute, fixing the failing Colab AMP check on a V100 (which showed a 1.5% FP32-vs-AMP difference) while leaving roughly a 200% safety margin.
1 parent cf3fb58 commit 9b4e054

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

utils/general.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -520,10 +520,10 @@ def check_amp(model):
520520
LOGGER.warning(emojis(f'{prefix}checks skipped ⚠️, not online.'))
521521
return True
522522
m = AutoShape(model, verbose=False) # model
523-
a = m(im).xyxy[0] # FP32 inference
523+
a = m(im).xywhn[0] # FP32 inference
524524
m.amp = True
525-
b = m(im).xyxy[0] # AMP inference
526-
if (a.shape == b.shape) and torch.allclose(a, b, atol=1.0): # close to 1.0 pixel bounding box
525+
b = m(im).xywhn[0] # AMP inference
526+
if (a.shape == b.shape) and torch.allclose(a, b, atol=0.05): # close to 5% absolute tolerance
527527
LOGGER.info(emojis(f'{prefix}checks passed ✅'))
528528
return True
529529
else:

0 commit comments

Comments
 (0)