    assert dist.is_initialized(), "Torch distributed not initialized. Please set dist_init_required to True or initialize before calling deepspeed.initialize()"
else:
    # discover using mpi4py if user specifies the flag
    self._mpi_check(args, dist_init_required)

self.dist_backend = "nccl"
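# Usage sketch (an assumption, not part of this file): callers normally reach
# this path through deepspeed.initialize(); `net` and `cmd_args` below are
# hypothetical placeholders for the user's model and parsed arguments.
#
#   import deepspeed
#   model_engine, optimizer, _, _ = deepspeed.initialize(
#       args=cmd_args,
#       model=net,
#       model_parameters=net.parameters(),
#       dist_init_required=True)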
# DeepSpeed will initialize torch distributed only if the user has not already initialized it.
if dist_init_required and not dist.is_initialized():
    logger.info("Initializing torch distributed with backend: {}".format(