Commit 448cb299 authored by rprenger's avatar rprenger
Browse files

Fixing bug caused by merge

parent feea48cd
@@ -180,7 +180,6 @@ def _initialize_distributed():
     torch.distributed.init_process_group(
         backend=args.distributed_backend,
         world_size=args.world_size, rank=args.rank,
-        init_method=init_method,
         timeout=timedelta(days=7))
     # Set the tensor model-parallel, pipeline model-parallel, and
...
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment