mmselfsup/configs/selfsup/simclr/simclr_resnet50_8xb32-coslr...

_base_ = [
    '../_base_/models/simclr.py',
    '../_base_/datasets/imagenet_simclr.py',
    '../_base_/schedules/lars_coslr-200e_in1k.py',
    '../_base_/default_runtime.py',
]
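# Note: keys defined in this file are merged into (and override) the
# matching keys inherited from the `_base_` configs above, following the
# standard mmcv config inheritance rules.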
# optimizer
optimizer = dict(
    type='LARS',
    lr=0.3,
    momentum=0.9,
    weight_decay=1e-6,
    paramwise_options={
        '(bn|gn)(\\d+)?.(weight|bias)':
        dict(weight_decay=0., lars_exclude=True),
        'bias': dict(weight_decay=0., lars_exclude=True)
    })
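# The `paramwise_options` keys above are regular expressions matched against
# parameter names: BN/GN weights and biases, and all biases, get no weight
# decay and are excluded from LARS adaptation. A minimal sketch of the
# matching (the parameter names are hypothetical, for illustration only):
#
#   import re
#   pattern = '(bn|gn)(\\d+)?.(weight|bias)'
#   re.search(pattern, 'backbone.bn1.weight')    # match -> lars_exclude
#   re.search(pattern, 'backbone.conv1.weight')  # no match -> regular LARS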
# learning rate scheduler
param_scheduler = [
    dict(
        type='LinearLR',
        start_factor=1e-4,
        by_epoch=True,
        begin=0,
        end=10,
        convert_to_iter_based=True),
    dict(
        type='CosineAnnealingLR',
        T_max=190,
        eta_min=0.,
        by_epoch=True,
        begin=10,
        end=200)
]
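# Taken together: the learning rate warms up linearly from lr * 1e-4 to lr
# over the first 10 epochs (interpolated per iteration because of
# `convert_to_iter_based=True`), then follows a cosine decay to 0 over the
# remaining 190 epochs, for 200 epochs in total.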
# runtime settings
# max_keep_ckpts controls the maximum number of ckpt files kept in your
# work_dirs: if it is 3, when CheckpointHook (in mmcv) saves the 4th ckpt,
# it removes the oldest one so that at most 3 ckpts remain
checkpoint_config = dict(interval=10, max_keep_ckpts=3)