# mmocr/tools/dist_train.sh
#!/usr/bin/env bash
# Launch (multi-node) distributed training with torch.distributed.launch.
#
# Usage:
#   ./dist_train.sh CONFIG GPUS [extra train.py args...]
#
# Arguments:
#   CONFIG  path to the training config file (forwarded to train.py)
#   GPUS    number of processes (GPUs) to launch on this node
#
# Environment overrides:
#   NNODES       total number of nodes            (default: 1)
#   NODE_RANK    rank of this node                (default: 0)
#   PORT         master port                      (default: 29500)
#   MASTER_ADDR  address of the rank-0 node       (default: 127.0.0.1)

if [ $# -lt 2 ]; then
  printf 'Usage: %s CONFIG GPUS [extra train.py args...]\n' "${0##*/}" >&2
  exit 2
fi

CONFIG=$1
GPUS=$2
NNODES=${NNODES:-1}
NODE_RANK=${NODE_RANK:-0}
PORT=${PORT:-29500}
MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}

# Prepend the repo root so 'mmocr' is importable without an installed package.
# All expansions are quoted so paths/args with spaces survive intact.
PYTHONPATH="$(dirname "$0")/..":${PYTHONPATH:-} \
python -m torch.distributed.launch \
    --nnodes="$NNODES" \
    --node_rank="$NODE_RANK" \
    --master_addr="$MASTER_ADDR" \
    --nproc_per_node="$GPUS" \
    --master_port="$PORT" \
    "$(dirname "$0")/train.py" \
    "$CONFIG" \
    --launcher pytorch "${@:3}"