A few optimizer comments, dead import, missing import

Ross Wightman 2021-08-17 18:01:33 -07:00
parent 959eaff121
commit 8a9eca5157
3 changed files with 8 additions and 2 deletions

View File

@@ -3,6 +3,7 @@ from .adamw import AdamW
from .adafactor import Adafactor
from .adahessian import Adahessian
from .lookahead import Lookahead
+from .madgrad import MADGRAD
from .nadam import Nadam
from .nvnovograd import NvNovoGrad
from .radam import RAdam

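The line added above is the "missing import" from the commit title. A minimal sketch of its effect, assuming this `__init__.py` belongs to the `timm.optim` package:

    # Before this commit the class was only reachable through the submodule:
    from timm.optim.madgrad import MADGRAD
    # After this commit it is also re-exported at the package level:
    from timm.optim import MADGRAD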
View File

@@ -1,3 +1,9 @@
""" PyTorch MADGRAD optimizer
MADGRAD: https://arxiv.org/abs/2101.11075
Code from: https://github.com/facebookresearch/madgrad
"""
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the

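The docstring added above points to the upstream MADGRAD paper and source. A usage sketch, assuming the `timm.optim` package path and the constructor defaults of the referenced facebookresearch/madgrad implementation (`lr`, `momentum`, `weight_decay`, `eps`):

    import torch
    from timm.optim import MADGRAD  # package path assumed

    model = torch.nn.Linear(10, 2)
    # Argument defaults mirror the upstream implementation; treat them as assumptions.
    optimizer = MADGRAD(model.parameters(), lr=1e-2, momentum=0.9, weight_decay=0.0, eps=1e-6)

    loss = model(torch.randn(4, 10)).sum()
    loss.backward()        # populate gradients
    optimizer.step()       # apply the MADGRAD update
    optimizer.zero_grad()  # clear gradients for the next iteration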
View File

@@ -1,12 +1,11 @@
""" Optimizer Factory w/ Custom Weight Decay
-Hacked together by / Copyright 2020 Ross Wightman
+Hacked together by / Copyright 2021 Ross Wightman
"""
from typing import Optional
import torch
import torch.nn as nn
import torch.optim as optim
-from torch.optim.optimizer import required
from .adabelief import AdaBelief
from .adafactor import Adafactor
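The removed `required` sentinel from `torch.optim.optimizer` is only a default-value placeholder for mandatory optimizer arguments (e.g. `lr` in `SGD`); it is not referenced in this factory module, which is the "dead import" from the commit title. With the factory, an optimizer is normally built from a name string rather than constructed directly. A sketch, assuming `create_optimizer_v2` is exported from `timm.optim` and that `'madgrad'` is accepted as an `opt` string at this point in the tree:

    import torch
    from timm.optim import create_optimizer_v2  # exported name assumed

    model = torch.nn.Linear(10, 2)
    # opt string, lr and weight_decay keywords assumed from the factory's usual interface
    optimizer = create_optimizer_v2(model, opt='madgrad', lr=1e-2, weight_decay=1e-5)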