Commit 3f3da214 authored by Hubert Lu's avatar Hubert Lu
Browse files

Update run_rocm_distributed.sh

parent 405956c3
...@@ -38,6 +38,8 @@ echo "Running syncbn python only tests"
python synced_batchnorm/python_single_gpu_unit_test.py
echo "Running syncbn batchnorm1d tests"
python synced_batchnorm/test_batchnorm1d.py
# Beware: you need a system with at least 4 GPUs to test group_size < world_size (currently fails on both upstream and the ROCm fork)
#python -m torch.distributed.launch --nproc_per_node=4 test_groups.py --group_size=2
## Run the DDP Tests
echo "running DDP tests"
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment