FP16 EMA bug fix

pull/408/head
Glenn Jocher 2020-07-13 15:47:46 -07:00
parent 2b1892430f
commit 2377e5f6ee
1 changed file with 2 additions and 2 deletions

@@ -195,8 +195,8 @@ class ModelEMA:
     def __init__(self, model, decay=0.9999, updates=0):
         # Create EMA
         self.ema = deepcopy(model.module if is_parallel(model) else model).eval()  # FP32 EMA
-        if next(model.parameters()).device.type != 'cpu':
-            self.ema.half()  # FP16 EMA
+        # if next(model.parameters()).device.type != 'cpu':
+        #     self.ema.half()  # FP16 EMA
         self.updates = updates  # number of EMA updates
         self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # decay exponential ramp (to help early epochs)
         for p in self.ema.parameters():
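
For context, below is a minimal, self-contained sketch of an FP32 EMA helper in the spirit of this patch. Only __init__ reflects the hunk above; the is_parallel helper and the update method are assumptions based on the usual pattern, not part of this commit. The point of the fix is that the EMA copy stays in FP32: repeatedly accumulating tiny (1 - decay) updates into an FP16 copy loses precision.

    # Sketch only: is_parallel() and update() are illustrative assumptions.
    import math
    from copy import deepcopy

    import torch
    import torch.nn as nn


    def is_parallel(model):
        # True if the model is wrapped in (Distributed)DataParallel
        return type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel)


    class ModelEMA:
        def __init__(self, model, decay=0.9999, updates=0):
            # Keep the EMA copy in FP32; halving it to FP16 is what this commit disables
            self.ema = deepcopy(model.module if is_parallel(model) else model).eval()  # FP32 EMA
            self.updates = updates  # number of EMA updates
            self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # decay ramp to help early epochs
            for p in self.ema.parameters():
                p.requires_grad_(False)

        def update(self, model):
            # Blend the current model weights into the FP32 EMA copy
            with torch.no_grad():
                self.updates += 1
                d = self.decay(self.updates)
                msd = (model.module if is_parallel(model) else model).state_dict()
                for k, v in self.ema.state_dict().items():
                    if v.dtype.is_floating_point:
                        v *= d
                        v += (1.0 - d) * msd[k].detach().float()

Typical usage would be to call update(model) after each optimizer step and evaluate or export from the .ema attribute.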