Unverified Commit 1b2658b9 authored by Chao Ma's avatar Chao Ma Committed by GitHub
Browse files

update (#2192)

parent 1aa25cb6
@@ -228,14 +228,6 @@ def run(args, device, data):
 forward_time += forward_end - start
 backward_time += compute_end - forward_end
# Aggregate gradients in multiple nodes.
if not args.standalone:
for param in model.parameters():
if param.requires_grad and param.grad is not None:
th.distributed.all_reduce(param.grad.data,
op=th.distributed.ReduceOp.SUM)
param.grad.data /= dgl.distributed.get_num_client()
 optimizer.step()
 update_time += time.time() - compute_end
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment