Mirror of https://github.com/open-mmlab/mmsegmentation.git (synced 2025-06-03 22:03:48 +08:00)
* Add Swin Transformer
* Fixed import
* Add some Swin training settings.
* Fix some filename errors.
* Fix attribute name: pretrain -> pretrained
* Upload mmcls implementation of Swin Transformer.
* Refactor Swin Transformer to follow mmcls style.
* Refactor init_weights of swin_transformer.py
* Fix lint
* Match inference precision
* Add some comments
* Add swin_convert to load official-style ckpt (see the sketch after this list)
* Remove arg: auto_pad
* 1. Complete comments for each block; 2. Correct weight convert function; 3. Fix the pad of Patch Merging;
* Clean function args.
* Fix ViT unit test.
* 1. Add Swin Transformer unit tests; 2. Fix some pad bugs; 3. Modify config to adapt to the new Swin implementation;
* Modify config arg
* Update README.md of Swin
* Fix config arg error and add some Swin benchmark info.
* Add Mem and ms test content for README.md of Swin Transformer.
* Fix docstring of Swin module
* 1. Register Swin Transformer to the model list; 2. Modify pth URL to keep the meta attribute;
* Update swin.py
* Merge config settings.
* Modify config style.
* Update README.md: add ViT link
* Modify main README.md

Co-authored-by: Jiarui XU <xvjiarui0826@gmail.com>
Co-authored-by: sennnnn <201730271412@mail.scut.edu.cn>
Co-authored-by: Junjun2016 <hejunjun@sjtu.edu.cn>
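The swin_convert helper mentioned above converts an official Swin Transformer checkpoint into the parameter naming this codebase expects. Below is a minimal usage sketch; the checkpoint filename is hypothetical, and swin_convert taking and returning a plain state dict is an assumption inferred from the commit message, not verified against the source.

# Hypothetical usage sketch for swin_convert; assumes it maps an official
# Swin state dict to mmseg-style keys (inferred from the commit message).
import torch

from mmseg.models.utils import swin_convert

# Load an official Swin Transformer checkpoint (hypothetical filename).
ckpt = torch.load('swin_tiny_patch4_window7_224.pth', map_location='cpu')

# Official releases often nest the weights under a 'model' key; fall back
# to the top-level dict if that key is absent.
state_dict = ckpt.get('model', ckpt)

# Convert official-style parameter names to this repo's naming scheme,
# then save the result for use as a pretrained backbone checkpoint.
torch.save(swin_convert(state_dict), 'swin_tiny_mmseg.pth')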
15 lines · 538 B · Python
from .ckpt_convert import swin_convert, vit_convert
from .embed import PatchEmbed
from .inverted_residual import InvertedResidual, InvertedResidualV3
from .make_divisible import make_divisible
from .res_layer import ResLayer
from .se_layer import SELayer
from .self_attention_block import SelfAttentionBlock
from .up_conv_block import UpConvBlock

__all__ = [
    'ResLayer', 'SelfAttentionBlock', 'make_divisible', 'InvertedResidual',
    'UpConvBlock', 'InvertedResidualV3', 'SELayer', 'vit_convert',
    'swin_convert', 'PatchEmbed'
]
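As a quick illustration of one of these exports, here is a minimal sketch of make_divisible, which rounds a channel count to a hardware-friendly multiple. The rounding behavior assumed here is the common MobileNet-style convention (round to the nearest multiple of the divisor, never dropping more than about 10% below the requested value); the exact defaults in this repo are not verified.

# Minimal usage sketch for make_divisible; the rounding rule is assumed
# from the common MobileNet-style convention, not verified in this repo.
from mmseg.models.utils import make_divisible

channels = make_divisible(37, 8)
print(channels)  # expected: 40 under the assumed rounding rule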