mirror of
https://github.com/ultralytics/yolov5.git
synced 2025-06-03 14:49:29 +08:00
smart_optimizer()
revert to weight with decay (#9817)
If a parameter does not fall into any other category, it is assigned to the weight-decay group. Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com>
This commit is contained in:
parent
df80e7c723
commit
e42c89d4ef
@ -319,12 +319,13 @@ def smart_optimizer(model, name='Adam', lr=0.001, momentum=0.9, decay=1e-5):
     g = [], [], []  # optimizer parameter groups
     bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
     for v in model.modules():
-        if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias (no decay)
-            g[2].append(v.bias)
-        if isinstance(v, bn):  # weight (no decay)
-            g[1].append(v.weight)
-        elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # weight (with decay)
-            g[0].append(v.weight)
+        for p_name, p in v.named_parameters(recurse=0):
+            if p_name == 'bias':  # bias (no decay)
+                g[2].append(p)
+            elif p_name == 'weight' and isinstance(v, bn):  # weight (no decay)
+                g[1].append(p)
+            else:
+                g[0].append(p)  # weight (with decay)

     if name == 'Adam':
         optimizer = torch.optim.Adam(g[2], lr=lr, betas=(momentum, 0.999))  # adjust beta1 to momentum
Loading…
x
Reference in New Issue
Block a user