#!/usr/bin/env bash
# Launch distributed (multi-GPU, single-node) training via torch.distributed.launch.
#
# Usage:
#   [PYTHON=python] [PORT=29500] tools/dist_train.sh <config.py> <num_gpus> [extra train.py args...]
#
# The work dir mirrors the config path with "configs" replaced by "work_dirs",
# e.g. configs/selfsup/foo.py -> work_dirs/selfsup/foo/
set -euo pipefail

if (( $# < 2 )); then
  printf 'Usage: %s CONFIG GPUS [PY_ARGS...]\n' "${0##*/}" >&2
  exit 2
fi

PYTHON=${PYTHON:-"python"}
CFG=$1
GPUS=$2
# Keep trailing args as an array so quoted arguments survive intact.
PY_ARGS=("${@:3}")
PORT=${PORT:-29500}

# Derive the work dir with parameter expansion (strip extension, map
# configs -> work_dirs); no echo|sed subshell needed.
WORK_DIR=${CFG%.*}
WORK_DIR=${WORK_DIR//configs/work_dirs}/

# ${PY_ARGS[@]+...} guard: expanding an empty array under `set -u` errors
# on bash < 4.4, and we must not pass a spurious empty-string argument.
"$PYTHON" -m torch.distributed.launch --nproc_per_node="$GPUS" --master_port="$PORT" \
  tools/train.py "$CFG" --work_dir "$WORK_DIR" --seed 0 --launcher pytorch \
  ${PY_ARGS[@]+"${PY_ARGS[@]}"}