[Fix] Fix ATTENTION registry (#729)

* derive mmseg's ATTENTION registry from MMCV's ATTENTION registry (as its parent) to avoid conflicts with other repos (see the sketch below)

* remove redundant file
Junjun2016 2021-07-27 23:19:12 +08:00 committed by GitHub
parent 2b021e3168
commit b5ae7a7f69
2 changed files with 3 additions and 2 deletions
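Note: the following is a minimal sketch of the child-registry pattern this commit adopts, assuming mmcv >= 1.3 (where Registry accepts the parent and scope arguments). MMCV_ATTENTION here is a local stand-in for mmcv.cnn.bricks.registry.ATTENTION, and WindowMSAExample is a hypothetical module, not code from this repo.

from mmcv.utils import Registry

# Stand-in for mmcv's global ATTENTION registry
# (mmcv.cnn.bricks.registry.ATTENTION).
MMCV_ATTENTION = Registry('attention')

# Child registry, mirroring what builder.py now defines. The scope is
# passed explicitly here only because automatic scope inference needs a
# real package context; inside mmseg it would be inferred as 'mmseg'.
ATTENTION = Registry('attention', parent=MMCV_ATTENTION, scope='mmseg')

@ATTENTION.register_module()
class WindowMSAExample:  # hypothetical attention module
    def __init__(self, embed_dims=96, num_heads=3):
        self.embed_dims = embed_dims
        self.num_heads = num_heads

# Lookups hit the child registry first, so mmseg's entries no longer live
# in the shared parent and cannot clash with identically named modules
# registered by other downstream repos.
attn = ATTENTION.build(dict(type='WindowMSAExample', embed_dims=96, num_heads=3))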

mmseg/models/backbones/swin.py

@@ -5,7 +5,6 @@ import torch
 import torch.nn as nn
 import torch.nn.functional as F
 from mmcv.cnn import build_norm_layer, trunc_normal_init
-from mmcv.cnn.bricks.registry import ATTENTION
 from mmcv.cnn.bricks.transformer import FFN, build_dropout
 from mmcv.cnn.utils.weight_init import constant_init
 from mmcv.runner import _load_checkpoint
@@ -15,7 +14,7 @@ from torch.nn.modules.normalization import LayerNorm
 from torch.nn.modules.utils import _pair as to_2tuple
 from ...utils import get_root_logger
-from ..builder import BACKBONES
+from ..builder import ATTENTION, BACKBONES
 from ..utils import PatchEmbed, swin_convert

mmseg/models/builder.py

@@ -1,9 +1,11 @@
 import warnings
 from mmcv.cnn import MODELS as MMCV_MODELS
+from mmcv.cnn.bricks.registry import ATTENTION as MMCV_ATTENTION
 from mmcv.utils import Registry
 MODELS = Registry('models', parent=MMCV_MODELS)
+ATTENTION = Registry('attention', parent=MMCV_ATTENTION)
 BACKBONES = MODELS
 NECKS = MODELS
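For context, a minimal sketch of the name collision the child registry prevents, again with hypothetical module names and under the same mmcv >= 1.3 assumption: two repos registering the same type name into mmcv's shared registry raise a KeyError, while per-repo child registries keep the names apart.

from mmcv.utils import Registry

SHARED = Registry('attention')  # stand-in for mmcv's global registry

@SHARED.register_module(name='MultiheadAttentionExample')
class RepoAImpl:
    pass

try:
    @SHARED.register_module(name='MultiheadAttentionExample')
    class RepoBImpl:
        pass
except KeyError as err:
    # Both repos wrote to the same parent registry:
    # 'MultiheadAttentionExample is already registered in attention'
    print(err)

# With one child registry per repo, the same name coexists safely.
REPO_A = Registry('attention', parent=SHARED, scope='repo_a')
REPO_B = Registry('attention', parent=SHARED, scope='repo_b')

@REPO_A.register_module(name='MultiheadAttentionExample')
class RepoAAttention:
    pass

@REPO_B.register_module(name='MultiheadAttentionExample')
class RepoBAttention:
    pass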