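"""Tests for DGL message-passing kernels.

Each test runs a builtin message/reduce pair (e.g. ``fn.copy_src`` + ``fn.sum``)
and an equivalent pair of user-defined functions (UDFs) on the same graph, then
checks that the forward results and the gradients agree, for both full-graph
``update_all`` and partial ``pull`` updates. ``backend`` (imported as ``F``) is
the framework-agnostic test backend used throughout the test suite.
"""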
import dgl
import dgl.function as fn
import networkx as nx
import numpy as np
import backend as F
from itertools import product

np.random.seed(42)

def udf_copy_src(edges):
    return {'m': edges.src['u']}


def udf_copy_edge(edges):
    return {'m': edges.data['e']}

def udf_mean(nodes):
    return {'r2': nodes.mailbox['m'].mean(1)}

def udf_sum(nodes):
    return {'r2': nodes.mailbox['m'].sum(1)}

def udf_max(nodes):
    return {'r2': F.max(nodes.mailbox['m'], 1)}


# Feature tensor dimensions used by generate_feature.
D1 = 5
D2 = 3
D3 = 4
# Map each reducer name to the DGL builtin and to the matching UDF.
builtin = {'sum': fn.sum, 'max': fn.max, 'mean': fn.mean}
udf_reduce = {'sum': udf_sum, 'max': udf_max, 'mean': udf_mean}
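# Neutral element for each reducer (sum: 0, max: -inf).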
fill_value = {'sum': 0, 'max': float("-inf")}


def generate_feature(g, broadcast='none'):
    """Create graph with src, edge, dst feature. broadcast can be 'u',
    'e', 'v', 'none'
    """
    nv = g.number_of_nodes()
    ne = g.number_of_edges()
    if broadcast == 'e':
        u = F.tensor(np.random.uniform(-1, 1, (nv, D1, D2, D3)))
        e = F.tensor(np.random.uniform(-1, 1, (ne, D2, 1)))
        v = F.tensor(np.random.uniform(-1, 1, (nv, D1, D2, D3)))
    elif broadcast == 'u':
        u = F.tensor(np.random.uniform(-1, 1, (nv, D2, 1)))
        e = F.tensor(np.random.uniform(-1, 1, (ne, D1, D2, D3)))
        v = F.tensor(np.random.uniform(-1, 1, (nv, D1, D2, D3)))
    elif broadcast == 'v':
        u = F.tensor(np.random.uniform(-1, 1, (nv, D1, D2, D3)))
        e = F.tensor(np.random.uniform(-1, 1, (ne, D1, D2, D3)))
        v = F.tensor(np.random.uniform(-1, 1, (nv, D2, 1)))
    else:
        u = F.tensor(np.random.uniform(-1, 1, (nv, D1, D2, D3)))
        e = F.tensor(np.random.uniform(-1, 1, (ne, D1, D2, D3)))
        v = F.tensor(np.random.uniform(-1, 1, (nv, D1, D2, D3)))
    return u, v, e


def test_copy_src_reduce():
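    """Check that builtin copy_src + {sum, max, mean} matches the UDF pipeline.

    Runs both full ``update_all`` and partial ``pull`` updates and compares
    forward results and gradients w.r.t. the source feature.
    """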
    def _test(red, partial):
        g = dgl.DGLGraph(nx.erdos_renyi_graph(100, 0.1))
        # NOTE(zihao): add self-loop to avoid zero-degree nodes.
        # https://github.com/dmlc/dgl/issues/761
        g.add_edges(g.nodes(), g.nodes())
        hu, hv, he = generate_feature(g, 'none')
        if partial:
            nid = F.tensor(list(range(0, 100, 2)))

        g.ndata['u'] = F.attach_grad(F.clone(hu))
        g.ndata['v'] = F.attach_grad(F.clone(hv))
        g.edata['e'] = F.attach_grad(F.clone(he))

        with F.record_grad():
            if partial:
                g.pull(nid, fn.copy_src(src='u', out='m'),
                       builtin[red](msg='m', out='r1'))
            else:
                g.update_all(fn.copy_src(src='u', out='m'),
                             builtin[red](msg='m', out='r1'))
            r1 = g.ndata['r1']
            F.backward(r1.sum())
            n_grad1 = F.grad(g.ndata['u'])

        # reset grad
        g.ndata['u'] = F.attach_grad(F.clone(hu))
        g.ndata['v'] = F.attach_grad(F.clone(hv))
        g.edata['e'] = F.attach_grad(F.clone(he))

        with F.record_grad():
            if partial:
                g.pull(nid, udf_copy_src, udf_reduce[red])
            else:
                g.update_all(udf_copy_src, udf_reduce[red])
            r2 = g.ndata['r2']
            F.backward(r2.sum())
            n_grad2 = F.grad(g.ndata['u'])

        assert F.allclose(r1, r2)
        assert F.allclose(n_grad1, n_grad2)

    _test('sum', False)
    _test('max', False)
    _test('mean', False)
    _test('sum', True)
    _test('max', True)
    _test('mean', True)




def test_copy_edge_reduce():
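    """Check that builtin copy_edge + {sum, max, mean} matches the UDF pipeline.

    Same as test_copy_src_reduce, but the message is the edge feature and the
    gradients are taken w.r.t. the edge feature.
    """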
    def _test(red, partial):
        g = dgl.DGLGraph(nx.erdos_renyi_graph(100, 0.1))
        # NOTE(zihao): add self-loop to avoid zero-degree nodes.
        g.add_edges(g.nodes(), g.nodes())
        hu, hv, he = generate_feature(g, 'none')
        if partial:
            nid = F.tensor(list(range(0, 100, 2)))

        g.ndata['u'] = F.attach_grad(F.clone(hu))
        g.ndata['v'] = F.attach_grad(F.clone(hv))
        g.edata['e'] = F.attach_grad(F.clone(he))

        with F.record_grad():
            if partial:
                g.pull(nid, fn.copy_edge(edge='e', out='m'),
                       builtin[red](msg='m', out='r1'))
            else:
                g.update_all(fn.copy_edge(edge='e', out='m'),
                             builtin[red](msg='m', out='r1'))
            r1 = g.ndata['r1']
            F.backward(r1.sum())
            e_grad1 = F.grad(g.edata['e'])

        # reset grad
        g.ndata['u'] = F.attach_grad(F.clone(hu))
        g.ndata['v'] = F.attach_grad(F.clone(hv))
        g.edata['e'] = F.attach_grad(F.clone(he))

        with F.record_grad():
            if partial:
                g.pull(nid, udf_copy_edge, udf_reduce[red])
            else:
                g.update_all(udf_copy_edge, udf_reduce[red])
            r2 = g.ndata['r2']
            F.backward(r2.sum())
            e_grad2 = F.grad(g.edata['e'])

        assert F.allclose(r1, r2)
        assert F.allclose(e_grad1, e_grad2)

    _test('sum', False)
    _test('max', False)
    _test('mean', False)
    _test('sum', True)
    _test('max', True)
    _test('mean', True)


def test_all_binary_builtins():
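    """Exhaustively test builtin binary message functions against UDFs.

    Covers every ordered pair of distinct operands from {u, v, e}, every binary
    op in {add, sub, mul, div}, every reducer in {sum, max, min, prod, mean},
    broadcasting on either operand, and both full and partial updates.
    """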
    def _test(g, lhs, rhs, binary_op, reducer, partial, nid, broadcast='none'):
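        """Compare one builtin <lhs>_<op>_<rhs> message + reducer against the
        equivalent UDF pair, checking forward results and both operand grads."""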
        # initialize node/edge features with uniform(-1, 1)
        hu, hv, he = generate_feature(g, broadcast)
        if binary_op == 'div':
            # op = div
            # lhs range: [-1, 1]
            # rhs range: [1, 2]
            # result range: [-1, 1]
            if rhs == 'u':
                hu = (hu + 3) / 2
            elif rhs == 'v':
                hv = (hv + 3) / 2
            elif rhs == 'e':
                he = (he + 3) / 2

        if binary_op in ('add', 'sub'):
            # op = add, sub
            # lhs range: [-1/2, 1/2]
            # rhs range: [-1/2, 1/2]
            # result range: [-1, 1]
            hu = hu / 2
            hv = hv / 2
            he = he / 2

        g.ndata['u'] = F.attach_grad(F.clone(hu))
        g.ndata['v'] = F.attach_grad(F.clone(hv))
        g.edata['e'] = F.attach_grad(F.clone(he))

        # Builtin message functions are named "<lhs>_<binary_op>_<rhs>",
        # e.g. lhs='u', binary_op='add', rhs='v' resolves to fn.u_add_v.
        builtin_msg_name = "{}_{}_{}".format(lhs, binary_op, rhs)
        builtin_msg = getattr(fn, builtin_msg_name)
        builtin_red = getattr(fn, reducer)

        def target_feature_switch(g, target):
            if target == "u":
                return g.ndata["u"]
            elif target == "v":
                return g.ndata["v"]
            else:
                return g.edata["e"]

        with F.record_grad():
            if partial:
                g.pull(nid, builtin_msg(lhs, rhs, 'm'), builtin_red('m', 'r1'))
            else:
                g.update_all(builtin_msg(lhs, rhs, 'm'), builtin_red('m', 'r1'))
            r1 = g.ndata.pop('r1')
            F.backward(r1.sum())
            lhs_grad_1 = F.grad(target_feature_switch(g, lhs))
            rhs_grad_1 = F.grad(target_feature_switch(g, rhs))

        # reset grad
        g.ndata['u'] = F.attach_grad(F.clone(hu))
        g.ndata['v'] = F.attach_grad(F.clone(hv))
        g.edata['e'] = F.attach_grad(F.clone(he))

        def target_switch(edges, target):
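            # Map an operand name to the corresponding edge-batch view.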
            if target == "u":
                return edges.src
            elif target == "v":
                return edges.dst
            elif target == "e":
                return edges.data
            else:
                assert False, "Unknown target {}".format(target)

        def mfunc(edges):
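            # UDF message: compute lhs <op> rhs with explicit shape broadcasting.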
            op = getattr(F, binary_op)
            lhs_data = target_switch(edges, lhs)[lhs]
            rhs_data = target_switch(edges, rhs)[rhs]
            # NOTE(zihao): we need to do batched broadcast
            # e.g. (68, 3, 1) op (68, 5, 3, 4)
            while F.ndim(lhs_data) < F.ndim(rhs_data):
                lhs_data = F.unsqueeze(lhs_data, 1)
            while F.ndim(rhs_data) < F.ndim(lhs_data):
                rhs_data = F.unsqueeze(rhs_data, 1)
            return {"m": op(lhs_data, rhs_data)}

        def rfunc(nodes):
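            # UDF reduce: apply the reducer along the mailbox (neighbor) axis.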
            op = getattr(F, reducer)
            return {"r2": op(nodes.mailbox['m'], 1)}

        with F.record_grad():
            if partial:
                g.pull(nid, mfunc, rfunc)
            else:
                g.update_all(mfunc, rfunc)
            r2 = g.ndata.pop('r2')
            F.backward(r2.sum(), F.tensor([1.]))
            lhs_grad_2 = F.grad(target_feature_switch(g, lhs))
            rhs_grad_2 = F.grad(target_feature_switch(g, rhs))

        if reducer == 'prod':
            rtol = 1e-2
            atol = 1e-2
        else:
            rtol = 1e-4
            atol = 1e-4

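        # Print the first mismatching elements when a comparison fails.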
        def _print_error(a, b):
            print("ERROR: Test {}_{}_{}_{} {}".
                  format(lhs, binary_op, rhs, reducer, broadcast))
            print(a, b)
            a_np = F.asnumpy(F.cpu(a)).flatten()
            b_np = F.asnumpy(F.cpu(b)).flatten()
            for i, (x, y) in enumerate(zip(a_np, b_np)):
                if not np.allclose(x, y, rtol, atol):
                    print('@{} {} v.s. {}'.format(i, x, y))

        if not F.allclose(r1, r2, rtol, atol):
            _print_error(r1, r2)
        assert F.allclose(r1, r2, rtol, atol)

        if not F.allclose(lhs_grad_1, lhs_grad_2, rtol, atol):
            print("left grad")
            _print_error(lhs_grad_1, lhs_grad_2)
        assert F.allclose(lhs_grad_1, lhs_grad_2, rtol, atol)

        if not F.allclose(rhs_grad_1, rhs_grad_2, rtol, atol):
            print("right grad")
            _print_error(rhs_grad_1, rhs_grad_2)
        assert F.allclose(rhs_grad_1, rhs_grad_2, rtol, atol)

    g = dgl.DGLGraph()
    g.add_nodes(20)
    # NOTE(zihao): add self-loop to avoid zero-degree nodes.
    g.add_edges(g.nodes(), g.nodes())
    for i in range(2, 18):
        g.add_edge(0, i)
        g.add_edge(1, i)
        g.add_edge(i, 18)
        g.add_edge(i, 19)
    g.add_edge(18, 0)
    g.add_edge(18, 1)
    g.add_edge(19, 0)
    g.add_edge(19, 1)
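    # Destination nodes used by the partial (g.pull) variants.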
    nid = F.tensor([1, 3, 4, 5, 7, 10, 13, 17, 19])
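    # Sweep all operand pairs, ops, reducers, broadcast modes, and update modes.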
    target = ["u", "v", "e"]
    for lhs, rhs in product(target, target):
        if lhs == rhs:
            continue
        for binary_op in ["add", "sub", "mul", "div"]:
            for reducer in ["sum", "max", "min", "prod", "mean"]:
                for broadcast in ["none", lhs, rhs]:
                    for partial in [False, True]:
                        _test(g, lhs, rhs, binary_op, reducer, partial, nid,
                              broadcast=broadcast)

if __name__ == '__main__':
    test_copy_src_reduce()
    test_copy_edge_reduce()
    test_all_binary_builtins()