Mirror of https://github.com/open-mmlab/mmpretrain.git (synced 2025-06-03 14:59:18 +08:00)
[Refactor] Move transforms in mmselfsup to mmpretrain.
* Update transform docs and configs, and register some mmcv transforms in mmpretrain.
* Fix missing transform wrapper.
* Update self-supervised transforms.
* Fix unit tests.
* Update GaussianBlur in configs.
Co-authored-by: fangyixiao18 <fangyx18@hotmail.com>
18 lines · 500 B · Python
_base_ = [
    '../../_base_/models/resnet50.py',
    '../../_base_/datasets/imagenet_bs32_pil_resize.py',
    '../../_base_/schedules/imagenet_sgd_steplr_100e.py',
    '../../_base_/default_runtime.py',
]

model = dict(backbone=dict(frozen_stages=4))

# optimizer
optim_wrapper = dict(
    type='OptimWrapper',
    optimizer=dict(type='SGD', lr=30., momentum=0.9, weight_decay=0.))

# runtime settings
default_hooks = dict(
    checkpoint=dict(type='CheckpointHook', interval=10, max_keep_ckpts=3))
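For reference, the sketch below shows one way such a benchmark config is typically inspected with MMEngine before training; the file path is a hypothetical placeholder, since the page does not show where this config lives in the repo.

# Minimal sketch (not part of the config file above): load the merged config
# with MMEngine and check the settings that characterise this frozen-backbone
# benchmark. The path is a hypothetical placeholder.
from mmengine.config import Config

cfg = Config.fromfile('path/to/this_benchmark_config.py')  # hypothetical path
print(cfg.model.backbone.frozen_stages)             # 4: all backbone stages frozen
print(cfg.optim_wrapper.optimizer['lr'])            # 30.0: high LR for the trainable head
print(cfg.default_hooks.checkpoint.max_keep_ckpts)  # 3: keep only the last 3 checkpoints

Training itself would then usually be launched through MMPreTrain's standard entry point, e.g. python tools/train.py <path-to-this-config>.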