EMA FP16 fix #279

Author: Glenn Jocher
Date: 2020-07-03 16:57:08 -07:00
parent f767023c56
commit af41083d52


@@ -176,13 +176,13 @@ class ModelEMA:
     def __init__(self, model, decay=0.9999, device=''):
         # Create EMA
-        self.ema = deepcopy(model.module if is_parallel(model) else model).half()  # FP16 EMA
+        self.ema = deepcopy(model.module if is_parallel(model) else model)  # FP32 EMA
         self.ema.eval()
         self.updates = 0  # number of EMA updates
         self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # decay exponential ramp (to help early epochs)
         self.device = device  # perform ema on different device from model if set
         if device:
-            self.ema.to(device=device)
+            self.ema.to(device)
         for p in self.ema.parameters():
             p.requires_grad_(False)
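
For context, the constructor above only builds the shadow copy; the per-step averaging lives in an update method outside this hunk. Below is a minimal, self-contained sketch of that pattern (TinyEMA and its names are illustrative assumptions, not the repo's actual code). It also shows why dropping .half() matters: with decay near 0.9999 each update moves the shadow weights by only ~1e-4 of the difference, an increment small enough to be rounded away in FP16.

import math
from copy import deepcopy

import torch
import torch.nn as nn


class TinyEMA:
    # Illustrative stand-in for ModelEMA; names and structure are assumptions.
    def __init__(self, model, decay=0.9999):
        self.ema = deepcopy(model).eval()  # FP32 shadow copy, as in the fix above
        self.updates = 0
        # Same ramp as the diff: starts near 0, approaches `decay` after ~2000 updates
        self.decay = lambda x: decay * (1 - math.exp(-x / 2000))
        for p in self.ema.parameters():
            p.requires_grad_(False)

    @torch.no_grad()
    def update(self, model):
        self.updates += 1
        d = self.decay(self.updates)
        msd = model.state_dict()
        for k, v in self.ema.state_dict().items():
            if v.dtype.is_floating_point:
                # ema = d * ema + (1 - d) * model; at d = 0.9999 the step is ~1e-4,
                # which FP16 (roughly 3 significant decimal digits) would lose
                v.mul_(d).add_(msd[k].detach(), alpha=1 - d)


# Usage: call update() after each optimizer step
model = nn.Linear(4, 2)
ema = TinyEMA(model)
for _ in range(3):
    ema.update(model)  # a real loop would run a training step first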