""" Activation Factory

Hacked together by / Copyright 2020 Ross Wightman
"""
from typing import Union, Callable, Type

import torch
from torch import nn
from torch.nn import functional as F

from .activations import *
from .activations_me import *
from .config import is_exportable, is_scriptable
# PyTorch has an optimized, native 'silu' (aka 'swish') operator as of PyTorch 1.7.
|
|
|
|
# Also hardsigmoid, hardswish, and soon mish. This code will use native version if present.
|
|
|
|
# Eventually, the custom SiLU, Mish, Hard*, layers will be removed and only native variants will be used.
|
2020-10-30 06:45:17 +08:00
|
|
|
_has_silu = 'silu' in dir(torch.nn.functional)
|
2021-05-27 06:28:42 +08:00
|
|
|
_has_hardswish = 'hardswish' in dir(torch.nn.functional)
|
|
|
|
_has_hardsigmoid = 'hardsigmoid' in dir(torch.nn.functional)
|
|
|
|
_has_mish = 'mish' in dir(torch.nn.functional)
_ACT_FN_DEFAULT = dict(
|
2020-10-30 06:45:17 +08:00
|
|
|
silu=F.silu if _has_silu else swish,
|
|
|
|
swish=F.silu if _has_silu else swish,
|
2021-05-27 06:28:42 +08:00
|
|
|
mish=F.mish if _has_mish else mish,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
relu=F.relu,
|
|
|
|
relu6=F.relu6,
|
2020-06-12 05:49:23 +08:00
|
|
|
leaky_relu=F.leaky_relu,
|
|
|
|
elu=F.elu,
|
|
|
|
celu=F.celu,
|
|
|
|
selu=F.selu,
|
2020-12-01 05:27:40 +08:00
|
|
|
gelu=gelu,
|
2022-11-22 14:14:12 +08:00
|
|
|
gelu_tanh=gelu_tanh,
|
2023-11-03 08:18:17 +08:00
|
|
|
quick_gelu=quick_gelu,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
sigmoid=sigmoid,
|
|
|
|
tanh=tanh,
|
2021-05-27 06:28:42 +08:00
|
|
|
hard_sigmoid=F.hardsigmoid if _has_hardsigmoid else hard_sigmoid,
|
|
|
|
hard_swish=F.hardswish if _has_hardswish else hard_swish,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
hard_mish=hard_mish,
|
|
|
|
)
_ACT_FN_ME = dict(
|
2020-10-30 06:45:17 +08:00
|
|
|
silu=F.silu if _has_silu else swish_me,
|
|
|
|
swish=F.silu if _has_silu else swish_me,
|
2021-05-27 06:28:42 +08:00
|
|
|
mish=F.mish if _has_mish else mish_me,
|
|
|
|
hard_sigmoid=F.hardsigmoid if _has_hardsigmoid else hard_sigmoid_me,
|
|
|
|
hard_swish=F.hardswish if _has_hardswish else hard_swish_me,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
hard_mish=hard_mish_me,
|
|
|
|
)
_ACT_FNS = (_ACT_FN_ME, _ACT_FN_DEFAULT)
|
2021-05-27 06:28:42 +08:00
|
|
|
for a in _ACT_FNS:
|
|
|
|
a.setdefault('hardsigmoid', a.get('hard_sigmoid'))
|
|
|
|
a.setdefault('hardswish', a.get('hard_swish'))
_ACT_LAYER_DEFAULT = dict(
|
2020-10-30 06:45:17 +08:00
|
|
|
silu=nn.SiLU if _has_silu else Swish,
|
|
|
|
swish=nn.SiLU if _has_silu else Swish,
|
2021-05-27 06:28:42 +08:00
|
|
|
mish=nn.Mish if _has_mish else Mish,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
relu=nn.ReLU,
|
|
|
|
relu6=nn.ReLU6,
|
2020-10-03 07:17:42 +08:00
|
|
|
leaky_relu=nn.LeakyReLU,
|
2020-06-12 05:49:23 +08:00
|
|
|
elu=nn.ELU,
|
2020-12-01 05:27:40 +08:00
|
|
|
prelu=PReLU,
|
2020-06-12 05:49:23 +08:00
|
|
|
celu=nn.CELU,
|
|
|
|
selu=nn.SELU,
|
2020-12-01 05:27:40 +08:00
|
|
|
gelu=GELU,
|
2022-11-22 14:14:12 +08:00
|
|
|
gelu_tanh=GELUTanh,
|
2023-11-03 08:18:17 +08:00
|
|
|
quick_gelu=QuickGELU,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
sigmoid=Sigmoid,
|
|
|
|
tanh=Tanh,
|
2021-05-27 06:28:42 +08:00
|
|
|
hard_sigmoid=nn.Hardsigmoid if _has_hardsigmoid else HardSigmoid,
|
|
|
|
hard_swish=nn.Hardswish if _has_hardswish else HardSwish,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
hard_mish=HardMish,
|
2023-04-21 13:41:39 +08:00
|
|
|
identity=nn.Identity,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
)
_ACT_LAYER_ME = dict(
|
2020-10-30 06:45:17 +08:00
|
|
|
silu=nn.SiLU if _has_silu else SwishMe,
|
|
|
|
swish=nn.SiLU if _has_silu else SwishMe,
|
2021-05-27 06:28:42 +08:00
|
|
|
mish=nn.Mish if _has_mish else MishMe,
|
|
|
|
hard_sigmoid=nn.Hardsigmoid if _has_hardsigmoid else HardSigmoidMe,
|
|
|
|
hard_swish=nn.Hardswish if _has_hardswish else HardSwishMe,
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
hard_mish=HardMishMe,
|
|
|
|
)
_ACT_LAYERS = (_ACT_LAYER_ME, _ACT_LAYER_DEFAULT)
|
2021-05-27 06:28:42 +08:00
|
|
|
for a in _ACT_LAYERS:
|
|
|
|
a.setdefault('hardsigmoid', a.get('hard_sigmoid'))
|
|
|
|
a.setdefault('hardswish', a.get('hard_swish'))
def get_act_fn(name: Union[Callable, str] = 'relu'):
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
""" Activation Function Factory
|
|
|
|
Fetching activation fns by name with this function allows export or torch script friendly
|
|
|
|
functions to be returned dynamically based on current config.
|
|
|
|
"""
|
|
|
|
if not name:
|
|
|
|
return None
|
2021-05-27 06:28:42 +08:00
|
|
|
if isinstance(name, Callable):
|
|
|
|
return name
|
2024-08-16 08:58:15 +08:00
|
|
|
name = name.lower()
|
2024-05-07 07:32:49 +08:00
|
|
|
if not (is_exportable() or is_scriptable()):
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
# If not exporting or scripting the model, first look for a memory-efficient version with
|
|
|
|
# custom autograd, then fallback
|
|
|
|
if name in _ACT_FN_ME:
|
|
|
|
return _ACT_FN_ME[name]
|
|
|
|
return _ACT_FN_DEFAULT[name]
def get_act_layer(name: Union[Type[nn.Module], str] = 'relu'):
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
""" Activation Layer Factory
|
|
|
|
Fetching activation layers by name with this function allows export or torch script friendly
|
|
|
|
functions to be returned dynamically based on current config.
|
|
|
|
"""
|
2023-11-04 00:10:34 +08:00
|
|
|
if name is None:
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
return None
|
2021-12-02 04:07:45 +08:00
|
|
|
if not isinstance(name, str):
|
|
|
|
# callable, module, etc
|
2021-05-27 06:28:42 +08:00
|
|
|
return name
|
2023-11-04 00:10:34 +08:00
|
|
|
if not name:
|
|
|
|
return None
|
2024-08-16 08:58:15 +08:00
|
|
|
name = name.lower()
|
2024-05-07 07:32:49 +08:00
|
|
|
if not (is_exportable() or is_scriptable()):
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
if name in _ACT_LAYER_ME:
|
|
|
|
return _ACT_LAYER_ME[name]
|
|
|
|
return _ACT_LAYER_DEFAULT[name]
def create_act_layer(name: Union[Type[nn.Module], str], inplace=None, **kwargs):
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
act_layer = get_act_layer(name)
|
2021-05-27 06:28:42 +08:00
|
|
|
if act_layer is None:
|
Monster commit, activation refactor, VoVNet, norm_act improvements, more
* refactor activations into basic PyTorch, jit scripted, and memory efficient custom auto
* implement hard-mish, better grad for hard-swish
* add initial VovNet V1/V2 impl, fix #151
* VovNet and DenseNet first models to use NormAct layers (support BatchNormAct2d, EvoNorm, InplaceIABN)
* Wrap IABN for any models that use it
* make more models torchscript compatible (DPN, PNasNet, Res2Net, SelecSLS) and add tests
2020-06-02 07:59:51 +08:00
|
|
|
return None
|
2022-08-18 05:32:58 +08:00
|
|
|
if inplace is None:
|
|
|
|
return act_layer(**kwargs)
|
|
|
|
try:
|
|
|
|
return act_layer(inplace=inplace, **kwargs)
|
|
|
|
except TypeError:
|
|
|
|
# recover if act layer doesn't have inplace arg
|
|
|
|
return act_layer(**kwargs)
|