Fix AMP check tolerance (#7937)

Adjust the tolerance to 5%, fixing the failing Colab AMP check on V100, where FP32 and AMP outputs differ by ~1.5%; the 5% threshold leaves more than a 200% safety margin over the observed difference.
Glenn Jocher 2022-05-23 15:57:16 +02:00 committed by GitHub
parent cf3fb58522
commit 9b4e05439c

@@ -520,10 +520,10 @@ def check_amp(model):
         LOGGER.warning(emojis(f'{prefix}checks skipped ⚠️, not online.'))
         return True
     m = AutoShape(model, verbose=False)  # model
-    a = m(im).xyxy[0]  # FP32 inference
+    a = m(im).xywhn[0]  # FP32 inference
     m.amp = True
-    b = m(im).xyxy[0]  # AMP inference
-    if (a.shape == b.shape) and torch.allclose(a, b, atol=1.0):  # close to 1.0 pixel bounding box
+    b = m(im).xywhn[0]  # AMP inference
+    if (a.shape == b.shape) and torch.allclose(a, b, atol=0.05):  # close to 5% absolute tolerance
         LOGGER.info(emojis(f'{prefix}checks passed ✅'))
         return True
     else:
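
For context, xywhn boxes are normalized to the 0-1 range, so atol=0.05 corresponds to 5% of the image dimension; the old check compared pixel-space xyxy boxes with atol=1.0, i.e. only ~0.16% of a 640 px image, which the ~1.5% FP32/AMP divergence on V100 exceeded. A minimal sketch of how the two tolerances behave, using made-up box values rather than actual model outputs:

    import torch

    # Hypothetical normalized xywhn detections (x-center, y-center, w, h, conf, cls);
    # values are illustrative, not real model outputs.
    a = torch.tensor([[0.500, 0.400, 0.200, 0.300, 0.90, 0.0]])  # FP32 result
    b = a.clone()
    b[:, :4] += 0.015  # simulate the ~1.5% FP32-vs-AMP drift observed on V100

    print(torch.allclose(a, b, atol=0.05))  # True: within the new 5% tolerance

    # Rescale to a 640 px image to mimic the old pixel-space comparison:
    # a 1.5% drift becomes ~10 px, far outside the old 1-pixel tolerance.
    a_px, b_px = a[:, :4] * 640, b[:, :4] * 640
    print(torch.allclose(a_px, b_px, atol=1.0))  # False: the old check fails here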