_base_ = [
    '../_base_/models/mae_vit-base-p16.py',
    '../_base_/datasets/imagenet_mae.py',
    '../_base_/schedules/adamw_coslr-200e_in1k.py',
    '../_base_/default_runtime.py',
]
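
# Note: MMEngine merges the base files above in order; any key redefined
# below overrides the value inherited from those bases field by field.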

# dataset 8 x 512
train_dataloader = dict(batch_size=512, num_workers=8)
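# With 8 GPUs this gives an effective batch size of 8 x 512 = 4096, the
# value plugged into the linear lr scaling rule below.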

# optimizer wrapper
optimizer = dict(
    type='AdamW', lr=1.5e-4 * 4096 / 256, betas=(0.9, 0.95), weight_decay=0.05)
optim_wrapper = dict(
    type='OptimWrapper',
    optimizer=optimizer,
    paramwise_cfg=dict(
        custom_keys={
            'ln': dict(decay_mult=0.0),
            'bias': dict(decay_mult=0.0),
            'pos_embed': dict(decay_mult=0.),
            'mask_token': dict(decay_mult=0.),
            'cls_token': dict(decay_mult=0.)
        }))
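
# The base lr follows the linear scaling rule: lr = 1.5e-4 * 4096 / 256
# = 2.4e-3. Each custom_keys entry is matched as a substring of parameter
# names, so LayerNorm weights ('ln'), biases, and the position/mask/class
# token embeddings are all excluded from weight decay.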

# learning rate scheduler
param_scheduler = [
    dict(
        type='LinearLR',
        start_factor=1e-4,
        by_epoch=True,
        begin=0,
        end=40,
        convert_to_iter_based=True),
    dict(
        type='CosineAnnealingLR',
        T_max=360,
        by_epoch=True,
        begin=40,
        end=400,
        convert_to_iter_based=True)
]
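
# 40 epochs of linear warmup from lr * 1e-4 up to the base lr, then cosine
# decay over the remaining 360 epochs (T_max=360); convert_to_iter_based
# updates the lr every iteration rather than once per epoch.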

# runtime settings
# pre-train for 400 epochs
train_cfg = dict(max_epochs=400)
default_hooks = dict(
    logger=dict(type='LoggerHook', interval=100),
    # only keeps the latest 3 checkpoints
    checkpoint=dict(type='CheckpointHook', interval=1, max_keep_ckpts=3))
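
# LoggerHook's interval is counted in iterations (log every 100 iters),
# while CheckpointHook's interval here is counted in epochs (save every
# epoch, pruning all but the latest 3 to bound disk usage).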

# randomness
randomness = dict(seed=0, diff_rank_seed=True)
resume = True
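
# diff_rank_seed=True gives each GPU rank its own seed, so data sampling
# and augmentation differ across ranks; resume=True auto-resumes from the
# latest checkpoint in the work dir if one exists.
#
# A typical 8-GPU launch (the config file name below is an assumption,
# check the repo for the exact path):
#   bash tools/dist_train.sh \
#       configs/selfsup/mae/mae_vit-base-p16_8xb512-amp-coslr-400e_in1k.py 8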