From 98d9d3950554387b0a5a2b23cc7c445cb9c1738a Mon Sep 17 00:00:00 2001
From: gaotongxiao
Date: Fri, 13 May 2022 15:55:06 +0800
Subject: [PATCH] Migrate configs to new styles

---
 configs/_base_/default_runtime.py             | 34 +++++++++++--------
 .../_base_/schedules/schedule_adadelta_18e.py |  8 -----
 .../_base_/schedules/schedule_adadelta_5e.py  | 16 +++++----
 .../_base_/schedules/schedule_adam_600e.py    | 15 ++++----
 .../schedules/schedule_adam_step_12e.py       | 18 +++++-----
 .../schedules/schedule_adam_step_20e.py       | 20 +++++------
 .../_base_/schedules/schedule_adam_step_5e.py | 13 ++++---
 .../schedules/schedule_adam_step_600e.py      | 13 ++++---
 .../_base_/schedules/schedule_adam_step_6e.py | 14 ++++----
 .../schedules/schedule_sgd_100k_iters.py      | 13 ++++---
 .../_base_/schedules/schedule_sgd_1200e.py    | 13 ++++---
 .../_base_/schedules/schedule_sgd_1500e.py    | 14 ++++----
 configs/_base_/schedules/schedule_sgd_160e.py | 19 +++++------
 configs/_base_/schedules/schedule_sgd_600e.py | 14 ++++----
 14 files changed, 118 insertions(+), 106 deletions(-)
 delete mode 100644 configs/_base_/schedules/schedule_adadelta_18e.py

diff --git a/configs/_base_/default_runtime.py b/configs/_base_/default_runtime.py
index de7f9650..67f6ed44 100644
--- a/configs/_base_/default_runtime.py
+++ b/configs/_base_/default_runtime.py
@@ -1,17 +1,21 @@
-# yapf:disable
-log_config = dict(
-    interval=5,
-    hooks=[
-        dict(type='TextLoggerHook')
-    ])
-# yapf:enable
-dist_params = dict(backend='nccl')
+# custom_imports = dict(imports=['mmcv.transforms'], allow_failed_imports=False)  # noqa
+default_scope = 'mmocr'
+
+default_hooks = dict(
+    optimizer=dict(type='OptimizerHook', grad_clip=None),
+    timer=dict(type='IterTimerHook'),
+    logger=dict(type='LoggerHook', interval=5),
+    param_scheduler=dict(type='ParamSchedulerHook'),
+    checkpoint=dict(type='CheckpointHook', interval=1),
+    sampler_seed=dict(type='DistSamplerSeedHook'),
+)
+
+env_cfg = dict(
+    cudnn_benchmark=True,
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    dist_cfg=dict(backend='nccl'),
+)
+
 log_level = 'INFO'
 load_from = None
-resume_from = None
-workflow = [('train', 1)]
-
-# disable opencv multithreading to avoid system being overloaded
-opencv_num_threads = 0
-# set multi-process start method as `fork` to speed up the training
-mp_start_method = 'fork'
+resume = False
diff --git a/configs/_base_/schedules/schedule_adadelta_18e.py b/configs/_base_/schedules/schedule_adadelta_18e.py
deleted file mode 100644
index 33f7960c..00000000
--- a/configs/_base_/schedules/schedule_adadelta_18e.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# optimizer
-optimizer = dict(type='Adadelta', lr=0.5)
-optimizer_config = dict(grad_clip=dict(max_norm=0.5))
-# learning policy
-lr_config = dict(policy='step', step=[8, 14, 16])
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=18)
-checkpoint_config = dict(interval=1)
diff --git a/configs/_base_/schedules/schedule_adadelta_5e.py b/configs/_base_/schedules/schedule_adadelta_5e.py
index ad996d65..1b813534 100644
--- a/configs/_base_/schedules/schedule_adadelta_5e.py
+++ b/configs/_base_/schedules/schedule_adadelta_5e.py
@@ -1,8 +1,10 @@
-# optimizer
 optimizer = dict(type='Adadelta', lr=1.0)
-optimizer_config = dict(grad_clip=None)
-# learning policy
-lr_config = dict(policy='step', step=[])
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=5)
-checkpoint_config = dict(interval=1)
+
+train_cfg = dict(by_epoch=True, max_epochs=5)
+val_cfg = dict(interval=1)
+test_cfg = dict()
+
+# learning rate
+param_scheduler = [
+    dict(type='ConstantLR'),
+]
diff --git a/configs/_base_/schedules/schedule_adam_600e.py b/configs/_base_/schedules/schedule_adam_600e.py
index a77dc520..9c30259c 100644
--- a/configs/_base_/schedules/schedule_adam_600e.py
+++ b/configs/_base_/schedules/schedule_adam_600e.py
@@ -1,8 +1,11 @@
 # optimizer
 optimizer = dict(type='Adam', lr=1e-3)
-optimizer_config = dict(grad_clip=None)
-# learning policy
-lr_config = dict(policy='poly', power=0.9)
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=600)
-checkpoint_config = dict(interval=100)
+
+train_cfg = dict(by_epoch=True, max_epochs=600)
+val_cfg = dict(interval=20)
+test_cfg = dict()
+
+# learning rate
+param_scheduler = [
+    dict(type='PolyLR', power=0.9, end=600),
+]
diff --git a/configs/_base_/schedules/schedule_adam_step_12e.py b/configs/_base_/schedules/schedule_adam_step_12e.py
index c92289d3..fdfc2ca1 100644
--- a/configs/_base_/schedules/schedule_adam_step_12e.py
+++ b/configs/_base_/schedules/schedule_adam_step_12e.py
@@ -1,12 +1,12 @@
 # optimizer
 optimizer = dict(type='Adam', lr=4e-4)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=True, max_epochs=12)
+val_cfg = dict(interval=1)
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(
-    policy='step',
-    warmup='linear',
-    warmup_iters=100,
-    warmup_ratio=1.0 / 3,
-    step=[11])
-runner = dict(type='EpochBasedRunner', max_epochs=12)
-checkpoint_config = dict(interval=1)
+param_scheduler = [
+    dict(type='LinearLR', end=100, by_epoch=False),
+    dict(type='MultiStepLR', milestones=[11], end=12),
+]
diff --git a/configs/_base_/schedules/schedule_adam_step_20e.py b/configs/_base_/schedules/schedule_adam_step_20e.py
index 81fb92cb..f35c2f4a 100644
--- a/configs/_base_/schedules/schedule_adam_step_20e.py
+++ b/configs/_base_/schedules/schedule_adam_step_20e.py
@@ -1,14 +1,12 @@
 # optimizer
 optimizer = dict(type='Adam', lr=1e-4)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=True, max_epochs=20)
+val_cfg = dict(interval=1)
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(
-    policy='step',
-    step=[16, 18],
-    warmup='linear',
-    warmup_iters=1,
-    warmup_ratio=0.001,
-    warmup_by_epoch=True)
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=20)
-checkpoint_config = dict(interval=1)
+param_scheduler = [
+    dict(type='LinearLR', end=1, start_factor=0.001),
+    dict(type='MultiStepLR', milestones=[16, 18], end=20),
+]
diff --git a/configs/_base_/schedules/schedule_adam_step_5e.py b/configs/_base_/schedules/schedule_adam_step_5e.py
index 371a3781..b85bd427 100644
--- a/configs/_base_/schedules/schedule_adam_step_5e.py
+++ b/configs/_base_/schedules/schedule_adam_step_5e.py
@@ -1,8 +1,11 @@
 # optimizer
 optimizer = dict(type='Adam', lr=1e-3)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=True, max_epochs=5)
+val_cfg = dict(interval=1)
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(policy='step', step=[3, 4])
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=5)
-checkpoint_config = dict(interval=1)
+param_scheduler = [
+    dict(type='MultiStepLR', milestones=[3, 4], end=5),
+]
diff --git a/configs/_base_/schedules/schedule_adam_step_600e.py b/configs/_base_/schedules/schedule_adam_step_600e.py
index 5daa2d4c..ea7db360 100644
--- a/configs/_base_/schedules/schedule_adam_step_600e.py
+++ b/configs/_base_/schedules/schedule_adam_step_600e.py
@@ -1,8 +1,11 @@
 # optimizer
 optimizer = dict(type='Adam', lr=1e-4)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=True, max_epochs=600)
+val_cfg = dict(interval=40)
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(policy='step', step=[200, 400])
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=600)
-checkpoint_config = dict(interval=100)
+param_scheduler = [
+    dict(type='MultiStepLR', milestones=[200, 400], end=600),
+]
diff --git a/configs/_base_/schedules/schedule_adam_step_6e.py b/configs/_base_/schedules/schedule_adam_step_6e.py
index 5b33a2f9..12da6cc8 100644
--- a/configs/_base_/schedules/schedule_adam_step_6e.py
+++ b/configs/_base_/schedules/schedule_adam_step_6e.py
@@ -1,8 +1,8 @@
-# optimizer
-optimizer = dict(type='Adam', lr=1e-3)
-optimizer_config = dict(grad_clip=None)
+_base_ = 'schedule_adam_step_5e.py'
+
+train_cfg = dict(by_epoch=True, max_epochs=6)
+
 # learning policy
-lr_config = dict(policy='step', step=[3, 4])
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=6)
-checkpoint_config = dict(interval=1)
+param_scheduler = [
+    dict(type='MultiStepLR', milestones=[3, 4], end=6),
+]
diff --git a/configs/_base_/schedules/schedule_sgd_100k_iters.py b/configs/_base_/schedules/schedule_sgd_100k_iters.py
index df2a3300..796fd13d 100644
--- a/configs/_base_/schedules/schedule_sgd_100k_iters.py
+++ b/configs/_base_/schedules/schedule_sgd_100k_iters.py
@@ -1,8 +1,11 @@
 # optimizer
 optimizer = dict(type='SGD', lr=0.007, momentum=0.9, weight_decay=0.0001)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=False, max_iters=100000)
+val_cfg = dict(interval=100001)  # Never evaluate
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(policy='poly', power=0.9, min_lr=1e-7, by_epoch=False)
-# running settings
-runner = dict(type='IterBasedRunner', max_iters=100000)
-checkpoint_config = dict(interval=10000)
+param_scheduler = [
+    dict(type='PolyLR', power=0.9, eta_min=1e-7, by_epoch=False, end=100000),
+]
diff --git a/configs/_base_/schedules/schedule_sgd_1200e.py b/configs/_base_/schedules/schedule_sgd_1200e.py
index bc7fbf69..182bbf9e 100644
--- a/configs/_base_/schedules/schedule_sgd_1200e.py
+++ b/configs/_base_/schedules/schedule_sgd_1200e.py
@@ -1,8 +1,11 @@
 # optimizer
 optimizer = dict(type='SGD', lr=0.007, momentum=0.9, weight_decay=0.0001)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=True, max_epochs=1200)
+val_cfg = dict(interval=20)  # Evaluate every 20 epochs
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(policy='poly', power=0.9, min_lr=1e-7, by_epoch=True)
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=1200)
-checkpoint_config = dict(interval=100)
+param_scheduler = [
+    dict(type='PolyLR', power=0.9, eta_min=1e-7, end=1200),
+]
diff --git a/configs/_base_/schedules/schedule_sgd_1500e.py b/configs/_base_/schedules/schedule_sgd_1500e.py
index 3368175e..18128bcb 100644
--- a/configs/_base_/schedules/schedule_sgd_1500e.py
+++ b/configs/_base_/schedules/schedule_sgd_1500e.py
@@ -1,8 +1,8 @@
-# optimizer
-optimizer = dict(type='SGD', lr=1e-3, momentum=0.90, weight_decay=5e-4)
-optimizer_config = dict(grad_clip=None)
+train_cfg = dict(by_epoch=True, max_epochs=1500)
+val_cfg = dict(interval=20)  # Evaluate every 20 epochs
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(policy='poly', power=0.9, min_lr=1e-7, by_epoch=True)
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=1500)
-checkpoint_config = dict(interval=100)
+param_scheduler = [
+    dict(type='PolyLR', power=0.9, eta_min=1e-7, end=1500),
+]
diff --git a/configs/_base_/schedules/schedule_sgd_160e.py b/configs/_base_/schedules/schedule_sgd_160e.py
index 985b8f63..7fbc755e 100644
--- a/configs/_base_/schedules/schedule_sgd_160e.py
+++ b/configs/_base_/schedules/schedule_sgd_160e.py
@@ -1,13 +1,12 @@
 # optimizer
 optimizer = dict(type='SGD', lr=0.08, momentum=0.9, weight_decay=0.0001)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=True, max_epochs=160)
+val_cfg = dict(interval=20)
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(
-    policy='step',
-    warmup='linear',
-    warmup_iters=500,
-    warmup_ratio=0.001,
-    step=[80, 128])
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=160)
-checkpoint_config = dict(interval=10)
+param_scheduler = [
+    dict(type='LinearLR', end=500, start_factor=0.001, by_epoch=False),
+    dict(type='MultiStepLR', milestones=[80, 128], end=160),
+]
diff --git a/configs/_base_/schedules/schedule_sgd_600e.py b/configs/_base_/schedules/schedule_sgd_600e.py
index ed57b422..98001b16 100644
--- a/configs/_base_/schedules/schedule_sgd_600e.py
+++ b/configs/_base_/schedules/schedule_sgd_600e.py
@@ -1,8 +1,10 @@
-# optimizer
 optimizer = dict(type='SGD', lr=1e-3, momentum=0.99, weight_decay=5e-4)
-optimizer_config = dict(grad_clip=None)
+
+train_cfg = dict(by_epoch=True, max_epochs=600)
+val_cfg = dict(interval=50)
+test_cfg = dict()
+
 # learning policy
-lr_config = dict(policy='step', step=[200, 400])
-# running settings
-runner = dict(type='EpochBasedRunner', max_epochs=600)
-checkpoint_config = dict(interval=100)
+param_scheduler = [
+    dict(type='MultiStepLR', milestones=[200, 400], end=600),
+]
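
Editor's note: for readers unfamiliar with the new-style configs, the sketch below shows how a downstream config might consume the migrated bases. It is a minimal, hypothetical example and not part of this patch; the file paths and overridden values are illustrative. The `_base_` inheritance and key-by-key dict merging shown are the standard mechanisms of the MMEngine-style config system these files are migrating to.

# Hypothetical downstream config, e.g. configs/textdet/some_model/my_config.py.
# It inherits the migrated runtime and schedule bases and overrides a few fields.
_base_ = [
    '../../_base_/default_runtime.py',
    '../../_base_/schedules/schedule_sgd_1200e.py',
]

# Dict fields from the bases merge recursively, so a partial dict only
# overrides the keys it names:
optimizer = dict(lr=0.014)  # keeps type='SGD', momentum and weight_decay from the base
default_hooks = dict(checkpoint=dict(interval=100))  # save checkpoints less often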