Update lamb.py comment

Ross Wightman 2021-08-18 09:27:40 -07:00 committed by GitHub
parent 4d284017b8
commit 8f68193c91


@@ -169,7 +169,7 @@ class Lamb(Optimizer):
 trust_ratio = one_tensor
 if weight_decay != 0 or group['use_nvlamb']:
     # Layer adaptation. By default, skip layer adaptation on parameters that are
-    # excluded from weight norm, unless use_nvlamb == True, then always enabled.
+    # excluded from weight decay, unless use_nvlamb == True, then always enabled.
     w_norm = p.data.norm(2.0)
     g_norm = update.norm(2.0)
     trust_ratio = torch.where(
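
For context on the comment being edited: the hunk ends mid-call, so the branches of the torch.where expression are not shown. Below is a minimal, self-contained sketch of how a LAMB-style trust ratio is typically computed, assuming the truncated torch.where falls back to 1.0 when either norm is zero; the standalone function layer_trust_ratio and its arguments are illustrative names, not the actual code in lamb.py.

import torch

def layer_trust_ratio(param: torch.Tensor, update: torch.Tensor) -> torch.Tensor:
    # Layer adaptation: scale this layer's step by ||param|| / ||update||,
    # falling back to 1.0 when either norm is zero.
    one = torch.ones((), device=param.device, dtype=param.dtype)
    w_norm = param.norm(2.0)
    g_norm = update.norm(2.0)
    return torch.where(
        w_norm > 0,
        torch.where(g_norm > 0, w_norm / g_norm, one),
        one,
    )

# Usage sketch: the parameter update then becomes p -= lr * trust_ratio * update.
p = torch.randn(256, 128)
u = torch.randn(256, 128) * 1e-3
print(layer_trust_ratio(p, u).item())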