[Fix] Auto scale lr (#1587)

pull/1587/head
Tong Gao 2022-12-01 14:07:10 +08:00 committed by GitHub
parent 2b6d258ae1
commit c957ded662
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed files with 7 additions and 7 deletions

View File

@@ -90,15 +90,15 @@ def main():
if args.resume: if args.resume:
cfg.resume = True cfg.resume = True
# enable automatically scaling LR
if args.auto_scale_lr: if args.auto_scale_lr:
if cfg.get('auto_scale_lr'): if 'auto_scale_lr' in cfg and \
cfg.auto_scale_lr = True 'base_batch_size' in cfg.auto_scale_lr:
cfg.auto_scale_lr.enable = True
else: else:
print_log( raise RuntimeError('Can not find "auto_scale_lr" or '
'auto_scale_lr does not exist in your config, ' '"auto_scale_lr.base_batch_size" in your'
'please set `auto_scale_lr = dict(base_batch_size=xx)', ' configuration file.')
logger='current',
level=logging.WARNING)
# build the runner from config # build the runner from config
if 'runner_type' not in cfg: if 'runner_type' not in cfg: