commit
c03e45cbf3
|
@ -39,6 +39,12 @@ default_cfgs = {
|
|||
'mobilenetv3_large_100': _cfg(
|
||||
interpolation='bicubic',
|
||||
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_large_100_ra-f55367f5.pth'),
|
||||
'mobilenetv3_large_100_miil': _cfg(
|
||||
interpolation='bilinear', mean=(0, 0, 0), std=(1, 1, 1),
|
||||
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mobilenetv3_large_100_1k_miil_78_0.pth'),
|
||||
'mobilenetv3_large_100_miil_in21k': _cfg(
|
||||
interpolation='bilinear', mean=(0, 0, 0), std=(1, 1, 1),
|
||||
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mobilenetv3_large_100_in21k_miil.pth', num_classes=11221),
|
||||
'mobilenetv3_small_075': _cfg(url=''),
|
||||
'mobilenetv3_small_100': _cfg(url=''),
|
||||
'mobilenetv3_rw': _cfg(
|
||||
|
@ -367,6 +373,24 @@ def mobilenetv3_large_100(pretrained=False, **kwargs):
|
|||
return model
|
||||
|
||||
|
||||
@register_model
def mobilenetv3_large_100_miil(pretrained=False, **kwargs):
    """MobileNet V3 (large, width 1.0).

    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
    """
    # Delegate straight to the shared generator; width multiplier fixed at 1.0.
    return _gen_mobilenet_v3('mobilenetv3_large_100_miil', 1.0, pretrained=pretrained, **kwargs)
|
||||
|
||||
|
||||
@register_model
def mobilenetv3_large_100_miil_in21k(pretrained=False, **kwargs):
    """MobileNet V3 (large, width 1.0), 21k pretraining.

    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
    """
    # Same generator as the 1k variant; only the registered config name differs.
    return _gen_mobilenet_v3('mobilenetv3_large_100_miil_in21k', 1.0, pretrained=pretrained, **kwargs)
|
||||
|
||||
|
||||
@register_model
|
||||
def mobilenetv3_small_075(pretrained=False, **kwargs):
|
||||
""" MobileNet V3 """
|
||||
|
|
|
@ -32,7 +32,9 @@ def _cfg(url='', **kwargs):
|
|||
|
||||
default_cfgs = {
|
||||
'tresnet_m': _cfg(
|
||||
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_m_80_8-dbc13962.pth'),
|
||||
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/tresnet_m_1k_miil_83_1.pth'),
|
||||
'tresnet_m_miil_in21k': _cfg(
|
||||
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/tresnet_m_miil_in21k.pth', num_classes=11221),
|
||||
'tresnet_l': _cfg(
|
||||
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_l_81_5-235b486c.pth'),
|
||||
'tresnet_xl': _cfg(
|
||||
|
@ -265,6 +267,12 @@ def tresnet_m(pretrained=False, **kwargs):
|
|||
return _create_tresnet('tresnet_m', pretrained=pretrained, **model_kwargs)
|
||||
|
||||
|
||||
@register_model
def tresnet_m_miil_in21k(pretrained=False, **kwargs):
    """TResNet-M variant registered under the 21k MIIL weight config."""
    # Merge the fixed stage depths with any caller overrides before building.
    cfg = dict(layers=[3, 4, 11, 3], **kwargs)
    return _create_tresnet('tresnet_m_miil_in21k', pretrained=pretrained, **cfg)
|
||||
|
||||
|
||||
@register_model
|
||||
def tresnet_l(pretrained=False, **kwargs):
|
||||
model_kwargs = dict(layers=[4, 5, 18, 3], width_factor=1.2, **kwargs)
|
||||
|
|
|
@ -118,6 +118,17 @@ default_cfgs = {
|
|||
'vit_deit_base_distilled_patch16_384': _cfg(
|
||||
url='https://dl.fbaipublicfiles.com/deit/deit_base_distilled_patch16_384-d0272ac0.pth',
|
||||
input_size=(3, 384, 384), crop_pct=1.0, classifier=('head', 'head_dist')),
|
||||
|
||||
# ViT ImageNet-21K-P pretraining
|
||||
'vit_base_patch16_224_miil_in21k': _cfg(
|
||||
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/vit_base_patch16_224_in21k_miil.pth',
|
||||
mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221,
|
||||
),
|
||||
'vit_base_patch16_224_miil': _cfg(
|
||||
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm'
|
||||
'/vit_base_patch16_224_1k_miil_84_4.pth',
|
||||
mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear',
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
@ -687,3 +698,23 @@ def vit_deit_base_distilled_patch16_384(pretrained=False, **kwargs):
|
|||
model = _create_vision_transformer(
|
||||
'vit_deit_base_distilled_patch16_384', pretrained=pretrained, distilled=True, **model_kwargs)
|
||||
return model
|
||||
|
||||
|
||||
@register_model
def vit_base_patch16_224_miil_in21k(pretrained=False, **kwargs):
    """ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).

    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
    """
    # ViT-B/16 geometry; note qkv_bias=False, unlike the stock ViT-B configs.
    cfg = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs)
    return _create_vision_transformer('vit_base_patch16_224_miil_in21k', pretrained=pretrained, **cfg)
|
||||
|
||||
|
||||
@register_model
def vit_base_patch16_224_miil(pretrained=False, **kwargs):
    """ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).

    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
    """
    # Identical architecture to the in21k variant; only the weight config name differs.
    cfg = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs)
    return _create_vision_transformer('vit_base_patch16_224_miil', pretrained=pretrained, **cfg)
|
Loading…
Reference in New Issue