Unverified Commit 22167f72 authored by Minjie Wang, committed by GitHub

[Refactor] Enable new kernel in all message passing APIs (#1953)

* WIP: frame refactor

* new frame

* simple update_all builtin

* move all subgraph routines into the same file

* SDDMM & SpMM schedules; node & edge UDFs

* degree bucketing

* handle some tricky zero-degree corner cases

* fix bug in frame append

* merge test_hetero_basics and test_basics

* some code rearrangement

* fix test_heterograph

* add mean SpMM

* enable all builtin combinations

* pass GPU test

* pass PyTorch tests

* wip

* fix some PyTorch debugging code

* fix bug in MXNet backward

* pass all MXNet unit tests

* pass TensorFlow tests

* docstring

* lint

* lint

* fix broadcasting bugs

* add warning and clamp for mean reducer

* add test for zero-degree mean

* address comments

* lint

* small fix
parent 5d5436ba
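
Several of the bullets above concern the builtin message-passing path (`update_all` with builtin functions lowered to SpMM/SDDMM kernels) and the zero-degree behavior of the mean reducer. Below is a minimal sketch of the pattern involved, assuming a PyTorch backend; the explicit clamp at the end illustrates the zero-degree guard and is not the PR's exact code:

```python
import torch
import dgl
import dgl.function as fn

# Graph with an isolated node: node 3 has no in-edges.
g = dgl.graph(([0, 1, 2], [1, 2, 0]), num_nodes=4)
g.ndata['h'] = torch.randn(4, 8)

# Builtin message passing: copy source features, mean-reduce at destinations.
# A (copy_u, mean) pair like this is what the refactor lowers to a single SpMM.
g.update_all(fn.copy_u('h', 'm'), fn.mean('m', 'h_new'))

# The zero-degree node receives 0 rather than NaN; normalizing by hand needs
# the same guard, here an explicit clamp on the in-degrees (illustrative only):
g.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h_sum'))
deg = g.in_degrees().float().clamp(min=1).unsqueeze(-1)
manual_mean = g.ndata['h_sum'] / deg
```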
@@ -7,7 +7,7 @@ extension-pkg-whitelist=
 # Add files or directories to the blacklist. They should be base names, not
 # paths.
-ignore=CVS,_cy2,_cy3,backend,data,contrib
+ignore=CVS,_cy2,_cy3,backend,data,contrib,_deprecate
 # Add files or directories matching the regex patterns to the blacklist. The
 # regex matches against base names, not paths.
@@ -192,7 +192,7 @@ function-naming-style=snake_case
 #function-rgx=
 # Good variable names which should always be accepted, separated by a comma.
-good-names=i,j,k,u,v,e,n,m,w,x,y,g,G,hg,fn,ex,Run,_,us,vs,gs,op,ty
+good-names=i,j,k,u,v,e,n,m,w,x,y,z,g,G,hg,fn,ex,Run,_,us,vs,gs,op,ty
 # Include a hint for the correct naming format with invalid-name.
 include-naming-hint=no
@@ -333,53 +333,6 @@ def test_edge_softmax(idtype):
     assert F.allclose(score.grad, grad_score)
     print(score.grad[:10], grad_score[:10])
 
-@parametrize_dtype
-@pytest.mark.parametrize('g', get_cases(['bipartite', 'homo'], exclude=['zero-degree', 'dglgraph']))
-def test_edge_softmax2(idtype, g):
-    g = g.astype(idtype).to(F.ctx())
-    g = g.local_var()
-    g.srcdata.clear()
-    g.dstdata.clear()
-    g.edata.clear()
-    a1 = F.randn((g.number_of_edges(), 1)).requires_grad_()
-    a2 = a1.clone().detach().requires_grad_()
-    g.edata['s'] = a1
-    g.group_apply_edges('dst', lambda edges: {'ss':F.softmax(edges.data['s'], 1)})
-    g.edata['ss'].sum().backward()
-    builtin_sm = nn.edge_softmax(g, a2)
-    builtin_sm.sum().backward()
-    #print(a1.grad - a2.grad)
-    assert len(g.srcdata) == 0
-    assert len(g.dstdata) == 0
-    assert len(g.edata) == 2
-    assert F.allclose(a1.grad, a2.grad, rtol=1e-4, atol=1e-4) # Follow tolerance in unittest backend
-
-"""
-# Test 2
-def generate_rand_graph(n, m=None, ctor=dgl.DGLGraph):
-    if m is None:
-        m = n
-    arr = (sp.sparse.random(m, n, density=0.1, format='coo') != 0).astype(np.int64)
-    return ctor(arr, readonly=True)
-
-for g in [generate_rand_graph(50),
-          generate_rand_graph(50, ctor=dgl.graph),
-          generate_rand_graph(100, 50, ctor=dgl.bipartite)]:
-    a1 = F.randn((g.number_of_edges(), 1)).requires_grad_()
-    a2 = a1.clone().detach().requires_grad_()
-    g.edata['s'] = a1
-    g.group_apply_edges('dst', lambda edges: {'ss':F.softmax(edges.data['s'], 1)})
-    g.edata['ss'].sum().backward()
-    builtin_sm = nn.edge_softmax(g, a2)
-    builtin_sm.sum().backward()
-    print(a1.grad - a2.grad)
-    assert len(g.srcdata) == 0
-    assert len(g.dstdata) == 0
-    assert len(g.edata) == 2
-    assert F.allclose(a1.grad, a2.grad, rtol=1e-4, atol=1e-4) # Follow tolerance in unittest backend
-"""
 
 @parametrize_dtype
 def test_partial_edge_softmax(idtype):
     g = dgl.rand_graph(30, 900)
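
The removed `test_edge_softmax2` above compared a UDF softmax (via `group_apply_edges`) against the builtin `edge_softmax` that this PR reroutes through the new kernels. For reference, a minimal usage sketch of the builtin, assuming a PyTorch backend and the `dgl.ops` location of the operator:

```python
import torch
import dgl
from dgl.ops import edge_softmax  # location assumed (DGL >= 0.5)

# Random graph: 30 nodes, 900 edges.
g = dgl.rand_graph(30, 900)
logits = torch.randn(g.number_of_edges(), 1, requires_grad=True)

# Normalize the scores over the incoming edges of each destination node.
attn = edge_softmax(g, logits)

# Gradients flow back to the raw logits.
attn.sum().backward()
assert logits.grad is not None
```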
@@ -209,31 +209,6 @@ def test_edge_softmax():
     # checkout gradient
     assert F.allclose(grads[0], grad_score)
     print(grads[0][:10], grad_score[:10])
 
-    # Test 2
-    def generate_rand_graph(n):
-        arr = (sp.sparse.random(n, n, density=0.1, format='coo') != 0).astype(np.int64)
-        return dgl.DGLGraph(arr, readonly=True)
-
-    g = generate_rand_graph(50).to(F.ctx())
-    a1 = F.randn((g.number_of_edges(), 1))
-    a2 = tf.identity(a1)
-    with tf.GradientTape() as tape:
-        tape.watch(a1)
-        g.edata['s'] = a1
-        g.group_apply_edges('dst', lambda edges: {'ss':F.softmax(edges.data['s'], 1)})
-        loss = tf.reduce_sum(g.edata['ss'])
-    a1_grad = tape.gradient(loss, [a1])[0]
-
-    with tf.GradientTape() as tape:
-        tape.watch(a2)
-        builtin_sm = nn.edge_softmax(g, a2)
-        loss = tf.reduce_sum(builtin_sm)
-    a2_grad = tape.gradient(loss, [a2])[0]
-
-    print(a1_grad - a2_grad)
-    assert len(g.ndata) == 0
-    assert len(g.edata) == 2
-    assert F.allclose(a1_grad, a2_grad, rtol=1e-4, atol=1e-4) # Follow tolerance in unittest backend
 
 def test_partial_edge_softmax():
     g = dgl.DGLGraph().to(F.ctx())
@@ -29,11 +29,7 @@ def get_cases(labels=None, exclude=[]):
             cases.add(case)
     return [fn() for fn in cases]
 
-@register_case(['dglgraph', 'path'])
-def dglgraph_path():
-    return dgl.DGLGraph(nx.path_graph(5))
-
-@register_case(['bipartite'])
+@register_case(['bipartite', 'zero-degree'])
 def bipartite1():
     return dgl.bipartite([(0, 0), (0, 1), (0, 4), (2, 1), (2, 4), (3, 3)])
@@ -46,6 +42,10 @@ def graph0():
     return dgl.graph(([0, 0, 0, 1, 1, 2, 2, 2, 3, 3, 3, 4, 6, 6, 7, 8, 9],
                       [4, 5, 1, 2, 4, 7, 9, 8 ,6, 4, 1, 0, 1, 0, 2, 3, 5]))
 
+@register_case(['homo', 'zero-degree', 'homo-zero-degree'])
+def bipartite1():
+    return dgl.graph([(0, 0), (0, 1), (0, 4), (2, 1), (2, 4), (3, 3)])
+
 @register_case(['homo', 'has_feature'])
 def graph1():
     g = dgl.graph(([0, 0, 0, 1, 1, 2, 2, 2, 3, 3, 3, 4, 6, 6, 7, 8, 9],
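
The case registry above is consumed through `get_cases` at parametrization time, as in the removed `test_edge_softmax2` hunk earlier. A hypothetical test using the same pattern (the import path is assumed from DGL's test layout):

```python
import pytest
from test_utils.graph_cases import get_cases  # import path assumed

# Runs once per registered homogeneous case, skipping 'zero-degree' graphs.
@pytest.mark.parametrize('g', get_cases(['homo'], exclude=['zero-degree']))
def test_every_homo_case(g):
    assert g.number_of_edges() > 0
```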