mirror of https://github.com/open-mmlab/mmcv.git (synced 2025-06-03 21:54:52 +08:00)
Remove redundant self.init_cfg (#959)
commit cb0ee1e339
parent ae411509dd
@@ -61,7 +61,6 @@ class MultiheadAttention(BaseModule):
         self.attn = nn.MultiheadAttention(embed_dims, num_heads, dropout,
                                           **kwargs)
         self.dropout = nn.Dropout(dropout)
-        self.init_cfg = init_cfg
 
     def forward(self,
                 query,
@@ -171,7 +170,6 @@ class FFN(BaseModule):
         self.num_fcs = num_fcs
         self.act_cfg = act_cfg
         self.dropout = dropout
-        self.init_cfg = init_cfg
         self.activate = build_activation_layer(act_cfg)
 
         layers = []
@@ -422,7 +420,6 @@ class TransformerLayerSequence(BaseModule):
         else:
             assert isinstance(transformerlayers, list) and \
                 len(transformerlayers) == num_layers
-        self.init_cfg = init_cfg
         self.num_layers = num_layers
         operation_order = transformerlayers[0]['operation_order']
         self.pre_norm = operation_order[0] == 'norm'
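
All three removed lines are the same change: each class inherits from BaseModule, which already records init_cfg when the subclass calls super().__init__(init_cfg), so re-assigning self.init_cfg afterward is redundant. A minimal sketch of that pattern, using a simplified stand-in for BaseModule rather than mmcv's exact source:

# Sketch only: this BaseModule is a simplified stand-in, not mmcv's
# exact implementation (mmcv.runner.BaseModule).
import torch.nn as nn


class BaseModule(nn.Module):
    """Simplified stand-in for mmcv.runner.BaseModule."""

    def __init__(self, init_cfg=None):
        super().__init__()
        # init_cfg is stored once here, on behalf of every subclass.
        self.init_cfg = init_cfg


class MultiheadAttention(BaseModule):
    """Trimmed-down version of the module touched by this commit."""

    def __init__(self, embed_dims, num_heads, dropout=0.0, init_cfg=None,
                 **kwargs):
        super().__init__(init_cfg)  # BaseModule already keeps init_cfg
        self.attn = nn.MultiheadAttention(embed_dims, num_heads, dropout,
                                          **kwargs)
        self.dropout = nn.Dropout(dropout)
        # self.init_cfg = init_cfg  <- the redundant line this commit removes


attn = MultiheadAttention(embed_dims=256, num_heads=8)
assert attn.init_cfg is None  # still set, via BaseModule.__init__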