[Refactor] Use `--gpu-id` instead of `--gpu-ids` in non-distributed multi-gpu training/testing. (#688)
parent bca695b684
commit 44e9902979
@@ -4,6 +4,7 @@ import copy
 import os
 import os.path as osp
 import time
+import warnings
 from datetime import datetime
 from pathlib import Path

@@ -62,13 +63,19 @@ def parse_args():
     group_gpus.add_argument(
         '--gpus',
         type=int,
-        help='number of gpus to use '
+        help='(Deprecated, please use --gpu-id) number of gpus to use '
         '(only applicable to non-distributed training)')
     group_gpus.add_argument(
         '--gpu-ids',
         type=int,
         nargs='+',
-        help='ids of gpus to use '
+        help='(Deprecated, please use --gpu-id) ids of gpus to use '
         '(only applicable to non-distributed training)')
+    group_gpus.add_argument(
+        '--gpu-id',
+        type=int,
+        default=0,
+        help='id of gpu to use '
+        '(only applicable to non-distributed training)')
     parser.add_argument('--seed', type=int, default=None, help='random seed')
     parser.add_argument(
@@ -316,10 +323,19 @@ def main():
     else:
         resume_fold = 0

+    if args.gpus is not None:
+        cfg.gpu_ids = range(1)
+        warnings.warn('`--gpus` is deprecated because we only support '
+                      'single GPU mode in non-distributed training. '
+                      'Use `gpus=1` now.')
     if args.gpu_ids is not None:
-        cfg.gpu_ids = args.gpu_ids
-    else:
-        cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus)
+        cfg.gpu_ids = args.gpu_ids[0:1]
+        warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`. '
+                      'Because we only support single GPU mode in '
+                      'non-distributed training. Use the first GPU '
+                      'in `gpu_ids` now.')
+    if args.gpus is None and args.gpu_ids is None:
+        cfg.gpu_ids = [args.gpu_id]

     # init distributed env first, since logger depends on the dist info.
     if args.launcher == 'none':
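For the training-script hunks above, the new selection logic can be restated as a small standalone sketch. Everything here is illustrative rather than a copy of the patch: `resolve_gpu_ids` is a hypothetical helper, and the simplified parser models the `group_gpus` argument group shown in the diff as a mutually exclusive group.

import argparse
import warnings


def resolve_gpu_ids(args):
    # Precedence introduced by the patch: the deprecated flags still work but
    # collapse to a single GPU with a warning; otherwise `--gpu-id` is used.
    if args.gpus is not None:
        warnings.warn('`--gpus` is deprecated; only single GPU mode is '
                      'supported in non-distributed training.')
        return range(1)
    if args.gpu_ids is not None:
        warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`; '
                      'only the first id in the list is used.')
        return args.gpu_ids[0:1]
    return [args.gpu_id]


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--gpus', type=int)
    group.add_argument('--gpu-ids', type=int, nargs='+')
    group.add_argument('--gpu-id', type=int, default=0)
    print(resolve_gpu_ids(parser.parse_args()))  # `--gpu-id 3` prints [3]

As in the patch, every branch returns a one-element sequence, so callers see the same shape no matter which flag was passed.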
@@ -80,7 +80,13 @@ def parse_args():
         '--gpu-ids',
         type=int,
         nargs='+',
-        help='ids of gpus to use '
+        help='(Deprecated, please use --gpu-id) ids of gpus to use '
         '(only applicable to non-distributed testing)')
+    parser.add_argument(
+        '--gpu-id',
+        type=int,
+        default=0,
+        help='id of gpu to use '
+        '(only applicable to non-distributed testing)')
     parser.add_argument(
         '--launcher',
@@ -115,18 +121,17 @@ def main():
     cfg.data.test.test_mode = True

     if args.gpu_ids is not None:
-        cfg.gpu_ids = args.gpu_ids
+        cfg.gpu_ids = args.gpu_ids[0:1]
+        warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`. '
+                      'Because we only support single GPU mode in '
+                      'non-distributed testing. Use the first GPU '
+                      'in `gpu_ids` now.')
     else:
-        cfg.gpu_ids = range(1)
+        cfg.gpu_ids = [args.gpu_id]

     # init distributed env first, since logger depends on the dist info.
     if args.launcher == 'none':
         distributed = False
-        if len(cfg.gpu_ids) > 1:
-            warnings.warn(f'The gpu-ids is reset from {cfg.gpu_ids} to '
-                          f'{cfg.gpu_ids[0:1]} to avoid potential error in '
-                          'non-distribute testing time.')
-            cfg.gpu_ids = cfg.gpu_ids[0:1]
     else:
         distributed = True
         init_dist(args.launcher, **cfg.dist_params)
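A small aside on the testing-script hunk above: the patch assigns `args.gpu_ids[0:1]` rather than `args.gpu_ids[0]`, so `cfg.gpu_ids` stays a list and downstream code that calls `len()` on it or iterates over it keeps working. A minimal illustration (the `gpu_ids` value is made up):

gpu_ids = [2, 3, 5]      # what `--gpu-ids 2 3 5` would parse to

single = gpu_ids[0:1]    # [2]  -- still a sequence, same shape as before
first = gpu_ids[0]       # 2    -- a bare int; len(first) would raise TypeError

assert single == [2] and len(single) == 1

The `else` branch produces the same shape by wrapping the new flag in a list: `cfg.gpu_ids = [args.gpu_id]`.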
@@ -33,13 +33,19 @@ def parse_args():
     group_gpus.add_argument(
         '--gpus',
         type=int,
-        help='number of gpus to use '
+        help='(Deprecated, please use --gpu-id) number of gpus to use '
         '(only applicable to non-distributed training)')
     group_gpus.add_argument(
         '--gpu-ids',
         type=int,
         nargs='+',
-        help='ids of gpus to use '
+        help='(Deprecated, please use --gpu-id) ids of gpus to use '
         '(only applicable to non-distributed training)')
+    group_gpus.add_argument(
+        '--gpu-id',
+        type=int,
+        default=0,
+        help='id of gpu to use '
+        '(only applicable to non-distributed training)')
     parser.add_argument('--seed', type=int, default=None, help='random seed')
     parser.add_argument(
@@ -93,10 +99,19 @@ def main():
                                 osp.splitext(osp.basename(args.config))[0])
     if args.resume_from is not None:
         cfg.resume_from = args.resume_from
+    if args.gpus is not None:
+        cfg.gpu_ids = range(1)
+        warnings.warn('`--gpus` is deprecated because we only support '
+                      'single GPU mode in non-distributed training. '
+                      'Use `gpus=1` now.')
     if args.gpu_ids is not None:
-        cfg.gpu_ids = args.gpu_ids
-    else:
-        cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus)
+        cfg.gpu_ids = args.gpu_ids[0:1]
+        warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`. '
+                      'Because we only support single GPU mode in '
+                      'non-distributed training. Use the first GPU '
+                      'in `gpu_ids` now.')
+    if args.gpus is None and args.gpu_ids is None:
+        cfg.gpu_ids = [args.gpu_id]

     # init distributed env first, since logger depends on the dist info.
     if args.launcher == 'none':
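Downstream, a one-element `cfg.gpu_ids` is all the non-distributed path needs to place the model. A hedged sketch of that consumption, using plain PyTorch calls rather than this repository's actual runner code (`select_device` is a made-up helper):

import torch


def select_device(gpu_ids):
    # Made-up helper: expects the one-element list produced by `--gpu-id`
    # (or by the deprecated flags after the fallback logic above).
    assert len(gpu_ids) == 1, 'non-distributed mode expects exactly one GPU id'
    if torch.cuda.is_available():
        torch.cuda.set_device(gpu_ids[0])
        return torch.device(f'cuda:{gpu_ids[0]}')
    return torch.device('cpu')


# e.g. after `--gpu-id 1`, cfg.gpu_ids == [1] and the model lands on cuda:1
device = select_device([1])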