#!/usr/bin/env bash
# Number of GPUs to use (first argument); master port defaults to 29500.
GPUS=$1
PORT=${PORT:-29500}

# Launch train.py with one process per GPU via torch.distributed.launch,
# forwarding the remaining arguments to the training script.
PYTHONPATH="$(dirname "$0")/..":$PYTHONPATH \
python -m torch.distributed.launch --nproc_per_node="$GPUS" --master_port="$PORT" \
    "$(dirname "$0")/train.py" --launcher pytorch "${@:2}"