Fix distributed train script

This commit is contained in:
Ross Wightman 2019-04-05 20:49:58 -07:00
parent 183d8e4aef
commit b0158a593e

View File

@ -1,5 +1,5 @@
#!/bin/bash
# Launch a multi-process distributed training run.
#
# Usage: ./distributed_train.sh NUM_PROC [train.py args...]
#   NUM_PROC - number of processes (typically GPUs) to launch on this node
#   remaining arguments are forwarded verbatim to train.py
NUM_PROC=$1
shift
# Quote the expansion so an empty/odd value fails loudly in the launcher
# instead of silently collapsing the argument.
python -m torch.distributed.launch --nproc_per_node="$NUM_PROC" train.py "$@"