mmocr/tools/dist_train.sh

#!/usr/bin/env bash
if [ $# -lt 3 ]; then
    echo "Usage: bash $0 CONFIG WORK_DIR GPUS"
    exit 1
fi
CONFIG=$1
WORK_DIR=$2
GPUS=$3
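# Master port for torch.distributed; defaults to 29500 and can be overridden by
# setting the PORT environment variable before invoking this script.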
PORT=${PORT:-29500}
# Make the repo root importable so train.py can find the mmocr package.
# (Exporting here avoids prefixing the assignment onto the `if` compound command,
# which bash does not accept.)
export PYTHONPATH="$(dirname "$0")/..":$PYTHONPATH

if [ "${GPUS}" -eq 1 ]; then
    # Single-GPU training: run train.py directly, no distributed launcher.
    python "$(dirname "$0")"/train.py "$CONFIG" --work-dir="${WORK_DIR}" "${@:4}"
else
    # Multi-GPU training: launch one process per GPU via torch.distributed.launch.
    python -m torch.distributed.launch --nproc_per_node="$GPUS" --master_port="$PORT" \
        "$(dirname "$0")"/train.py "$CONFIG" --work-dir="${WORK_DIR}" --launcher pytorch "${@:4}"
fi
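
# Example invocations (illustrative; the config path below is an assumption, substitute
# your own config file and work directory). Arguments after GPUS are forwarded to train.py.
#   bash tools/dist_train.sh configs/textdet/dbnet/dbnet_r50dcnv2_fpnc_1200e_icdar2015.py work_dirs/dbnet 1
#   PORT=29501 bash tools/dist_train.sh configs/textdet/dbnet/dbnet_r50dcnv2_fpnc_1200e_icdar2015.py work_dirs/dbnet 8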