"examples/git@developer.sourcefind.cn:OpenDAS/dgl.git" did not exist on "d30a69bf277effd9a013ddd81fa33661f2e31a01"
Unverified commit f9bde91f authored by xiang song (charlie.song), committed by GitHub
Browse files

[hotfix] Remove dead code (#1942)



* Remove dead code

* Fix
Co-authored-by: Ubuntu <ubuntu@ip-172-31-51-214.ec2.internal>
parent 4f499c7f
...@@ -104,18 +104,12 @@ def main(args): ...@@ -104,18 +104,12 @@ def main(args):
batch_size=args.batch_size, shuffle=True, num_workers=0) batch_size=args.batch_size, shuffle=True, num_workers=0)
# validation sampler # validation sampler
# we do not use full neighbor to save computation resources
val_sampler = dgl.sampling.MultiLayerNeighborSampler([args.fanout] * args.n_layers) val_sampler = dgl.sampling.MultiLayerNeighborSampler([args.fanout] * args.n_layers)
val_loader = dgl.sampling.NodeDataLoader( val_loader = dgl.sampling.NodeDataLoader(
g, {category: val_idx}, val_sampler, g, {category: val_idx}, val_sampler,
batch_size=args.batch_size, shuffle=True, num_workers=0) batch_size=args.batch_size, shuffle=True, num_workers=0)
# test sampler
test_sampler = dgl.sampling.MultiLayerNeighborSampler([args.fanout] * args.n_layers)
test_loader = dgl.sampling.NodeDataLoader(
g, {category: test_idx}, test_sampler,
batch_size=args.batch_size, shuffle=True, num_workers=0)
# optimizer # optimizer
all_params = itertools.chain(model.parameters(), embed_layer.parameters()) all_params = itertools.chain(model.parameters(), embed_layer.parameters())
optimizer = th.optim.Adam(all_params, lr=args.lr, weight_decay=args.l2norm) optimizer = th.optim.Adam(all_params, lr=args.lr, weight_decay=args.l2norm)
......
...@@ -116,7 +116,6 @@ class GSpMM(th.autograd.Function): ...@@ -116,7 +116,6 @@ class GSpMM(th.autograd.Function):
elif op in ['add', 'sub', 'copy_rhs']: elif op in ['add', 'sub', 'copy_rhs']:
dY.scatter_add_(0, argY.long(), _addsub(op, dZ)) dY.scatter_add_(0, argY.long(), _addsub(op, dZ))
dY = _reduce_grad(dY, Y.shape) dY = _reduce_grad(dY, Y.shape)
print('jesus2')
else: # Y has no gradient else: # Y has no gradient
dY = None dY = None
return None, None, None, dX, dY return None, None, None, dX, dY
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment