Update train.py to be compatible with the new config

pull/1802/head
mzr1996 2023-10-11 11:12:32 +08:00
parent b0a792eb08
commit 4849324629
1 changed file with 0 additions and 4 deletions

View File

@ -91,10 +91,6 @@ def merge_args(cfg, args):
# Enable automatic mixed-precision (AMP) training when `--amp` is passed.
if args.amp is True:
    # Only the default wrapper (or one already set to AMP) can be switched to
    # AmpOptimWrapper; a custom wrapper type would silently lose its behavior.
    optim_wrapper = cfg.optim_wrapper.get('type', 'OptimWrapper')
    assert optim_wrapper in ['OptimWrapper', 'AmpOptimWrapper'], \
        '`--amp` is not supported for custom optimizer wrapper type ' \
        f'`{optim_wrapper}`.'
    cfg.optim_wrapper.type = 'AmpOptimWrapper'
    # Keep a user-specified loss_scale if present; default to dynamic scaling.
    cfg.optim_wrapper.setdefault('loss_scale', 'dynamic')