Unverified Commit 989a15d1 authored by David Hall, committed by GitHub

fix _setup_devices in case where there is no torch.distributed package in build (#16821)

* fix _setup_devices in case where there is no torch.distributed

* in training_args_sm.py as well
parent c11a4957
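
The change guards the torch.distributed.is_initialized() call behind torch.distributed.is_available(): on PyTorch builds compiled without distributed support, is_available() returns False and the rest of the check short-circuits instead of failing. A minimal standalone sketch of the pattern (the helper name is illustrative, not part of this commit):

    import torch

    def process_group_is_initialized() -> bool:
        # is_available() must be checked first: on builds without the
        # distributed package, calling is_initialized() can fail outright.
        return torch.distributed.is_available() and torch.distributed.is_initialized()

    # False unless a process group has actually been initialized.
    print(process_group_is_initialized())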
@@ -77,7 +77,7 @@ class SageMakerTrainingArguments(TrainingArguments):
     @cached_property
     def _setup_devices(self) -> "torch.device":
         logger.info("PyTorch: setting up devices")
-        if torch.distributed.is_initialized() and self.local_rank == -1:
+        if torch.distributed.is_available() and torch.distributed.is_initialized() and self.local_rank == -1:
             logger.warning(
                 "torch.distributed process group is initialized, but local_rank == -1. "
                 "In order to use Torch DDP, launch your script with `python -m torch.distributed.launch"
@@ -1032,7 +1032,7 @@ class TrainingArguments:
     @torch_required
     def _setup_devices(self) -> "torch.device":
         logger.info("PyTorch: setting up devices")
-        if torch.distributed.is_initialized() and self.local_rank == -1:
+        if torch.distributed.is_available() and torch.distributed.is_initialized() and self.local_rank == -1:
             logger.warning(
                 "torch.distributed process group is initialized, but local_rank == -1. "
                 "In order to use Torch DDP, launch your script with `python -m torch.distributed.launch"
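
With both call sites patched, constructing training arguments on a CPU-only PyTorch build without distributed support should no longer fail when devices are set up. A hedged smoke test, assuming the post-fix transformers API where output_dir is the only required argument:

    from transformers import TrainingArguments

    args = TrainingArguments(output_dir="out")
    # Accessing .device triggers _setup_devices via the cached property.
    print(args.device)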