Fix an untested change, remove a debug print
parent eb7653614f
commit f28170df3f
@@ -220,7 +220,7 @@ class InvertedResidual(nn.Module):
         has_se = se_ratio is not None and se_ratio > 0.
         self.has_residual = (in_chs == out_chs and stride == 1) and not noskip
         self.drop_path_rate = drop_path_rate
-        print(act_layer)
+
         # Point-wise expansion
         self.conv_pw = create_conv2d(in_chs, mid_chs, exp_kernel_size, padding=pad_type, **conv_kwargs)
         self.bn1 = norm_layer(mid_chs, **norm_kwargs)
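The first hunk drops a leftover debug print from InvertedResidual.__init__. Because the print sits in the constructor, it fires once per block while the network is being assembled. A minimal standalone sketch of the same pattern (hypothetical Block class, not the timm code):

    import torch.nn as nn

    class Block(nn.Module):
        def __init__(self, act_layer=nn.ReLU):
            super().__init__()
            print(act_layer)              # debug leftover: runs at construction time
            self.act = act_layer(inplace=True)

    # Building a model with many blocks floods stdout before any forward pass:
    model = nn.Sequential(*[Block() for _ in range(3)])
    # prints "<class 'torch.nn.modules.activation.ReLU'>" three times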
@@ -60,7 +60,7 @@ class BatchNormAct2d(nn.BatchNorm2d):
         if torch.jit.is_scripting():
             x = self._forward_jit(x)
         else:
-            self._forward_python(x)
+            x = self._forward_python(x)
         if self.act is not None:
             x = self.act(x)
         return x
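The second hunk is the untested-change fix from the commit message: _forward_python returns the normalized tensor rather than mutating x in place, so calling it without reassignment left x untouched and the activation ran on the un-normalized input. A minimal sketch of the failure mode (the normalization body is a stand-in, not the actual _forward_python):

    import torch

    def _forward_python(x):
        # stand-in for BatchNormAct2d's batch-norm path; returns a new tensor
        return (x - x.mean()) / (x.std() + 1e-5)

    x = torch.randn(8)

    _forward_python(x)      # buggy call: the normalized result is discarded
    x = _forward_python(x)  # fixed call: x is rebound to the normalized tensor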