@@ -131,10 +131,10 @@ class DeepSpeedEngine(Module):
             dist_init_required = not dist.is_initialized()

         if dist_init_required is False:
-            assert (dist.is_initialized() == True), "Torch distributed not initialized. Please set dist_init_required to True or initialize before calling deepspeed.initialize()"
+            assert dist.is_initialized() is True, "Torch distributed not initialized. Please set dist_init_required to True or initialize before calling deepspeed.initialize()"

         # Initialize torch distributed if needed
         init_distributed(dist_backend=self.dist_backend)
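For context, here is a minimal sketch (not part of this diff) of the two call patterns the check above supports: letting DeepSpeed initialize torch.distributed itself, or initializing the process group in the user script and passing dist_init_required=False. The toy model, inline config dict, and the standalone invocation are placeholder assumptions; in practice the script runs under a distributed launcher, and older releases take the config via args.deepspeed_config or config_params rather than config=.

# Minimal sketch (not from this diff) of the two call patterns the check supports.
# The toy model and inline config are placeholders; adapt them to your own script
# and run under a launcher (e.g. `deepspeed train.py`) so rank/world-size env vars exist.
import torch
import torch.distributed as dist
import deepspeed

model = torch.nn.Linear(10, 10)
ds_config = {
    "train_batch_size": 8,
    "optimizer": {"type": "Adam", "params": {"lr": 1e-3}},
}

# Pattern 1: torch.distributed is not yet initialized, so dist_init_required
# defaults to True and DeepSpeed sets up the process group itself.
engine, optimizer, _, _ = deepspeed.initialize(model=model,
                                               model_parameters=model.parameters(),
                                               config=ds_config)

# Pattern 2: the script initializes the process group first and passes
# dist_init_required=False; the assert above fires only if that step was skipped.
# dist.init_process_group(backend="nccl")
# engine, optimizer, _, _ = deepspeed.initialize(model=model,
#                                                model_parameters=model.parameters(),
#                                                config=ds_config,
#                                                dist_init_required=False)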