FP16 EMA bug fix
parent 2b1892430f
commit 2377e5f6ee
utils
@@ -195,8 +195,8 @@ class ModelEMA:
     def __init__(self, model, decay=0.9999, updates=0):
         # Create EMA
         self.ema = deepcopy(model.module if is_parallel(model) else model).eval()  # FP32 EMA
-        if next(model.parameters()).device.type != 'cpu':
-            self.ema.half()  # FP16 EMA
+        # if next(model.parameters()).device.type != 'cpu':
+        #     self.ema.half()  # FP16 EMA
         self.updates = updates  # number of EMA updates
         self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # decay exponential ramp (to help early epochs)
         for p in self.ema.parameters():
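The rationale for keeping the EMA weights in FP32 can be illustrated with a rough standalone sketch (not part of the commit): with decay near 0.9999, each update moves the EMA by only about 1e-4 of the weight difference, which is easily lost to FP16 rounding, so an FP16 EMA can stall while an FP32 EMA keeps converging.

import torch

# Illustrative sketch only (not from this commit): compare an FP32 and an FP16
# EMA of a single weight under the same decay value used by ModelEMA.
decay = 0.9999
w = torch.tensor([1.0])          # current model weight (the target of the EMA)
ema_fp32 = torch.tensor([0.9])   # FP32 EMA state
ema_fp16 = ema_fp32.half()       # FP16 EMA state

for _ in range(1000):
    ema_fp32 = ema_fp32 * decay + (1.0 - decay) * w
    ema_fp16 = ema_fp16 * decay + (1.0 - decay) * w.half()

# The FP32 EMA drifts toward 1.0 as expected; the FP16 EMA tends to get stuck,
# because the ~1e-4 per-step increment is below FP16 resolution near 0.9.
print(ema_fp32.item(), ema_fp16.float().item())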