#!/usr/bin/env bash
# Distributed test launcher: runs basicsr/test.py across multiple GPUs on a single node via torchrun.
# export HIP_VISIBLE_DEVICES=3

GPUS=$1
CONFIG=$2
PORT=${PORT:-4321}

# usage
if [ $# -ne 2 ]; then
    echo "usage:"
    echo "./scripts/dist_test.sh [number of gpu] [path to option file]"
    exit 1
fi

# Legacy launcher (torch.distributed.launch), kept for reference:
# PYTHONPATH="$(dirname $0)/..:${PYTHONPATH}" \
# python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \
#     basicsr/test.py -opt $CONFIG --launcher pytorch

torchrun --nnodes=1 --nproc_per_node="$GPUS" \
    --rdzv_id=100 --rdzv_backend=c10d --rdzv_endpoint=localhost:29400 \
    basicsr/test.py -opt "$CONFIG"