[Feature] Add clamp activation layer. (#685)

* add clamp without unittest

* add clamp-act with unit test

* fix name bug

* use logical and

* fix logical_and

* fix linting

* rename ClampLayer to Clamp

* rename ClampLayer to Clamp

Co-authored-by: nbei <631557085@qq.com>
pull/702/head
Rui Xu 2020-12-09 10:28:32 +08:00 committed by GitHub
parent 1e925a05a2
commit cd96a84add
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 46 additions and 0 deletions

View File

@@ -1,3 +1,4 @@
import torch
import torch.nn as nn
from mmcv.utils import build_from_cfg
@@ -10,6 +11,38 @@ for module in [
ACTIVATION_LAYERS.register_module(module=module)
@ACTIVATION_LAYERS.register_module(name='Clip')
@ACTIVATION_LAYERS.register_module()
class Clamp(nn.Module):
    """Clamp activation layer.

    Clamps each element of the input feature map into the closed
    interval ``[min, max]``. See ``torch.clamp()`` for details.

    Args:
        min (Number | optional): Lower-bound of the range to be clamped to.
            Default to -1.
        max (Number | optional): Upper-bound of the range to be clamped to.
            Default to 1.
    """

    def __init__(self, min=-1., max=1.):
        super().__init__()
        # Bounds are stored as plain attributes; they are not learnable.
        self.min = min
        self.max = max

    def forward(self, x):
        """Forward function.

        Args:
            x (torch.Tensor): The input tensor.

        Returns:
            torch.Tensor: Clamped tensor.
        """
        # Equivalent to torch.clamp(x, min=self.min, max=self.max).
        return x.clamp(min=self.min, max=self.max)
def build_activation_layer(cfg):
"""Build activation layer.

View File

@@ -1,3 +1,4 @@
import numpy as np
import pytest
import torch
import torch.nn as nn
@@ -182,6 +183,18 @@ def test_build_activation_layer():
layer = build_activation_layer(cfg)
assert isinstance(layer, module)
# sanity check for Clamp
act = build_activation_layer(dict(type='Clamp'))
x = torch.randn(10) * 1000
y = act(x)
assert np.logical_and((y >= -1).numpy(), (y <= 1).numpy()).all()
act = build_activation_layer(dict(type='Clip', min=0))
y = act(x)
assert np.logical_and((y >= 0).numpy(), (y <= 1).numpy()).all()
act = build_activation_layer(dict(type='Clamp', max=0))
y = act(x)
assert np.logical_and((y >= -1).numpy(), (y <= 0).numpy()).all()
def test_build_padding_layer():
with pytest.raises(TypeError):