mmselfsup/benchmarks/dist_train_linear.sh

#!/usr/bin/env bash
set -e
set -x
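
# Example invocation (a sketch; the config and checkpoint paths below are
# illustrative, not guaranteed to exist in your checkout):
#   bash benchmarks/dist_train_linear.sh \
#       configs/benchmarks/linear_classification/imagenet/r50_last.py \
#       work_dirs/pretrains/moco_r50.pth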
CFG=$1 # use config files under "configs/benchmarks/linear_classification/"
PRETRAIN=$2
PY_ARGS=${@:3} # optional arguments forwarded to tools/train.py, e.g. --resume_from, --deterministic
GPUS=8 # When changing GPUS, please also change imgs_per_gpu in the config file accordingly to ensure the total batch size is 256.
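
# PORT defaults to 29500 and can be overridden from the environment when that
# port is already in use, e.g. (the value is only an example):
#   PORT=29501 bash benchmarks/dist_train_linear.sh <CFG> <PRETRAIN>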
PORT=${PORT:-29500}
if [ "$CFG" == "" ] || [ "$PRETRAIN" == "" ]; then
echo "ERROR: Missing arguments."
exit
fi
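
# Derive the work directory from the config path ("configs" replaced by
# "work_dirs", file extension stripped) plus the checkpoint's basename.
# With the illustrative paths above this would yield, e.g.:
#   work_dirs/benchmarks/linear_classification/imagenet/r50_last/moco_r50.pth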
WORK_DIR="$(echo ${CFG%.*} | sed -e "s/configs/work_dirs/g")/$(basename $PRETRAIN)"
# train
python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \
    tools/train.py \
    $CFG \
    --pretrained $PRETRAIN \
    --work_dir $WORK_DIR --seed 0 --launcher="pytorch" ${PY_ARGS}