import backend as F
import dgl

import numpy as np
import scipy.sparse as ssp
import networkx as nx
from dgl import DGLGraph
from collections import defaultdict as ddict
import unittest
from test_utils import parametrize_dtype

D = 5
reduce_msg_shapes = set()
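# reduce_msg_shapes collects the mailbox shapes seen inside reduce_func; the
# update-routine tests below compare it against the per-in-degree message
# batches (degree buckets) they expect DGL to form.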

def message_func(edges):
    assert F.ndim(edges.src['h']) == 2
    assert F.shape(edges.src['h'])[1] == D
    return {'m' : edges.src['h']}

def reduce_func(nodes):
    msgs = nodes.mailbox['m']
    reduce_msg_shapes.add(tuple(msgs.shape))
    assert F.ndim(msgs) == 3
    assert F.shape(msgs)[2] == D
    return {'accum' : F.sum(msgs, 1)}

def apply_node_func(nodes):
    return {'h' : nodes.data['h'] + nodes.data['accum']}

def generate_graph_old(grad=False):
    g = DGLGraph()
    g.add_nodes(10) # 10 nodes
    # create a graph where 0 is the source and 9 is the sink
    # 17 edges
    for i in range(1, 9):
        g.add_edge(0, i)
        g.add_edge(i, 9)
    # add a back flow from 9 to 0
    g.add_edge(9, 0)
    g = g.to(F.ctx())
    ncol = F.randn((10, D))
    ecol = F.randn((17, D))
    if grad:
        ncol = F.attach_grad(ncol)
        ecol = F.attach_grad(ecol)

    g.ndata['h'] = ncol
    g.edata['w'] = ecol
    g.set_n_initializer(dgl.init.zero_initializer)
    g.set_e_initializer(dgl.init.zero_initializer)
    return g

def generate_graph(idtype, grad=False):
    '''
    s, d, eid
    0, 1, 0
    1, 9, 1
    0, 2, 2
    2, 9, 3
    0, 3, 4
    3, 9, 5
    0, 4, 6
    4, 9, 7
    0, 5, 8
    5, 9, 9
    0, 6, 10
    6, 9, 11
    0, 7, 12
    7, 9, 13
    0, 8, 14
    8, 9, 15
    9, 0, 16
    '''
    u = F.tensor([0, 1, 0, 2, 0, 3, 0, 4, 0, 5, 0, 6, 0, 7, 0, 8, 9])
    v = F.tensor([1, 9, 2, 9, 3, 9, 4, 9, 5, 9, 6, 9, 7, 9, 8, 9, 0])
    g = dgl.graph((u, v), idtype=idtype)
    assert g.device == F.ctx()
    ncol = F.randn((10, D))
    ecol = F.randn((17, D))
    if grad:
        ncol = F.attach_grad(ncol)
        ecol = F.attach_grad(ecol)

    g.ndata['h'] = ncol
    g.edata['w'] = ecol
    g.set_n_initializer(dgl.init.zero_initializer)
    g.set_e_initializer(dgl.init.zero_initializer)
    return g

def test_compatible():
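    # Smoke test: the legacy DGLGraph construction path should still run end to end.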
    g = generate_graph_old()

@parametrize_dtype
def test_batch_setter_getter(idtype):
    def _pfc(x):
        return list(F.zerocopy_to_numpy(x)[:,0])
    g = generate_graph(idtype)
    # set all nodes
    g.ndata['h'] = F.zeros((10, D))
    assert F.allclose(g.ndata['h'], F.zeros((10, D)))
    # pop nodes
    old_len = len(g.ndata)
    g.ndata.pop('h')
    assert len(g.ndata) == old_len - 1
    g.ndata['h'] = F.zeros((10, D))
    # set partial nodes
    u = F.tensor([1, 3, 5], g.idtype)
    g.nodes[u].data['h'] = F.ones((3, D))
    assert _pfc(g.ndata['h']) == [0., 1., 0., 1., 0., 1., 0., 0., 0., 0.]
    # get partial nodes
    u = F.tensor([1, 2, 3], g.idtype)
    assert _pfc(g.nodes[u].data['h']) == [1., 0., 1.]

    '''
    s, d, eid
    0, 1, 0
    1, 9, 1
    0, 2, 2
    2, 9, 3
    0, 3, 4
    3, 9, 5
    0, 4, 6
    4, 9, 7
    0, 5, 8
    5, 9, 9
    0, 6, 10
    6, 9, 11
    0, 7, 12
    7, 9, 13
    0, 8, 14
    8, 9, 15
    9, 0, 16
    '''
    # set all edges
    g.edata['l'] = F.zeros((17, D))
    assert _pfc(g.edata['l']) == [0.] * 17
    # pop edges
    old_len = len(g.edata)
    g.edata.pop('l')
    assert len(g.edata) == old_len - 1
    g.edata['l'] = F.zeros((17, D))
    # set partial edges (many-many)
    u = F.tensor([0, 0, 2, 5, 9], g.idtype)
    v = F.tensor([1, 3, 9, 9, 0], g.idtype)
    g.edges[u, v].data['l'] = F.ones((5, D))
    truth = [0.] * 17
    truth[0] = truth[4] = truth[3] = truth[9] = truth[16] = 1.
    assert _pfc(g.edata['l']) == truth
    # set partial edges (many-one)
    u = F.tensor([3, 4, 6], g.idtype)
    v = F.tensor([9], g.idtype)
    g.edges[u, v].data['l'] = F.ones((3, D))
    truth[5] = truth[7] = truth[11] = 1.
    assert _pfc(g.edata['l']) == truth
    # set partial edges (one-many)
    u = F.tensor([0], g.idtype)
    v = F.tensor([4, 5, 6], g.idtype)
    g.edges[u, v].data['l'] = F.ones((3, D))
    truth[6] = truth[8] = truth[10] = 1.
    assert _pfc(g.edata['l']) == truth
    # get partial edges (many-many)
    u = F.tensor([0, 6, 0], g.idtype)
    v = F.tensor([6, 9, 7], g.idtype)
    assert _pfc(g.edges[u, v].data['l']) == [1., 1., 0.]
    # get partial edges (many-one)
    u = F.tensor([5, 6, 7], g.idtype)
    v = F.tensor([9], g.idtype)
    assert _pfc(g.edges[u, v].data['l']) == [1., 1., 0.]
    # get partial edges (one-many)
    u = F.tensor([0], g.idtype)
    v = F.tensor([3, 4, 5], g.idtype)
    assert _pfc(g.edges[u, v].data['l']) == [1., 1., 1.]

@parametrize_dtype
def test_batch_setter_autograd(idtype):
    g = generate_graph(idtype, grad=True)
    h1 = g.ndata['h']
    # partial set
    v = F.tensor([1, 2, 8], g.idtype)
    hh = F.attach_grad(F.zeros((len(v), D)))
    with F.record_grad():
        g.nodes[v].data['h'] = hh
        h2 = g.ndata['h']
        F.backward(h2, F.ones((10, D)) * 2)
    assert F.array_equal(F.grad(h1)[:,0], F.tensor([2., 0., 0., 2., 2., 2., 2., 2., 0., 2.]))
    assert F.array_equal(F.grad(hh)[:,0], F.tensor([2., 2., 2.]))

def _test_nx_conversion():
    # check conversion between networkx and DGLGraph

    def _check_nx_feature(nxg, nf, ef):
        # check node and edge feature of nxg
        # this is used to check to_networkx
        num_nodes = len(nxg)
        num_edges = nxg.size()
        if num_nodes > 0:
            node_feat = ddict(list)
            for nid, attr in nxg.nodes(data=True):
                assert len(attr) == len(nf)
                for k in nxg.nodes[nid]:
                    node_feat[k].append(F.unsqueeze(attr[k], 0))
            for k in node_feat:
                feat = F.cat(node_feat[k], 0)
                assert F.allclose(feat, nf[k])
        else:
            assert len(nf) == 0
        if num_edges > 0:
            edge_feat = ddict(lambda: [0] * num_edges)
            for u, v, attr in nxg.edges(data=True):
                assert len(attr) == len(ef) + 1 # extra id
                eid = attr['id']
                for k in ef:
                    edge_feat[k][eid] = F.unsqueeze(attr[k], 0)
            for k in edge_feat:
                feat = F.cat(edge_feat[k], 0)
                assert F.allclose(feat, ef[k])
        else:
            assert len(ef) == 0

    n1 = F.randn((5, 3))
    n2 = F.randn((5, 10))
    n3 = F.randn((5, 4))
    e1 = F.randn((4, 5))
    e2 = F.randn((4, 7))
    g = DGLGraph()
    g.add_nodes(5)
    g.add_edges([0,1,3,4], [2,4,0,3])
    g.ndata.update({'n1': n1, 'n2': n2, 'n3': n3})
    g.edata.update({'e1': e1, 'e2': e2})

    # convert to networkx
    nxg = g.to_networkx(node_attrs=['n1', 'n3'], edge_attrs=['e1', 'e2'])
    assert len(nxg) == 5
    assert nxg.size() == 4
    _check_nx_feature(nxg, {'n1': n1, 'n3': n3}, {'e1': e1, 'e2': e2})

    # convert to DGLGraph, nx graph has id in edge feature
    # use id feature to test non-tensor copy
    g = dgl.from_networkx(nxg, node_attrs=['n1'], edge_attrs=['e1', 'id'])
    # check graph size
    assert g.number_of_nodes() == 5
    assert g.number_of_edges() == 4
    # check number of features
    # test with existing dglgraph (so existing features should be cleared)
    assert len(g.ndata) == 1
    assert len(g.edata) == 2
    # check feature values
    assert F.allclose(g.ndata['n1'], n1)
    # with id in nx edge feature, e1 should follow original order
    assert F.allclose(g.edata['e1'], e1)
    assert F.array_equal(F.astype(g.edata['id'], F.int64), F.copy_to(F.arange(0, 4), F.cpu()))

    # test conversion after modifying DGLGraph
    g.edata.pop('id') # pop id so we don't need to provide id when adding edges
    new_n = F.randn((2, 3))
    new_e = F.randn((3, 5))
    g.add_nodes(2, data={'n1': new_n})
    # add three edges, one is a multi-edge
    g.add_edges([3, 6, 0], [4, 5, 2], data={'e1': new_e})
    n1 = F.cat((n1, new_n), 0)
    e1 = F.cat((e1, new_e), 0)
    # convert to networkx again
    nxg = g.to_networkx(node_attrs=['n1'], edge_attrs=['e1'])
    assert len(nxg) == 7
    assert nxg.size() == 7
    _check_nx_feature(nxg, {'n1': n1}, {'e1': e1})

    # now test convert from networkx without id in edge feature
    # first pop id in edge feature
    for _, _, attr in nxg.edges(data=True):
        attr.pop('id')
    # test with a new graph
    g = dgl.from_networkx(nxg, node_attrs=['n1'], edge_attrs=['e1'])
    # check graph size
    assert g.number_of_nodes() == 7
    assert g.number_of_edges() == 7
    # check number of features
    assert len(g.ndata) == 1
    assert len(g.edata) == 1
    # check feature values
    assert F.allclose(g.ndata['n1'], n1)
    # edge feature order follows nxg.edges()
    edge_feat = []
    for _, _, attr in nxg.edges(data=True):
        edge_feat.append(F.unsqueeze(attr['e1'], 0))
    edge_feat = F.cat(edge_feat, 0)
    assert F.allclose(g.edata['e1'], edge_feat)

    # Test converting from a networkx graph whose nodes are
    # not labeled with consecutive integers.
    nxg = nx.cycle_graph(5)
    nxg.remove_nodes_from([0, 4])
    for u in nxg.nodes():
        nxg.nodes[u]['h'] = F.tensor([u])
    for u, v, d in nxg.edges(data=True):
        d['h'] = F.tensor([u, v])

    g = dgl.from_networkx(nxg, node_attrs=['h'], edge_attrs=['h'])
    assert g.number_of_nodes() == 3
    assert g.number_of_edges() == 4
    assert g.has_edge_between(0, 1)
    assert g.has_edge_between(1, 2)
    assert F.allclose(g.ndata['h'], F.tensor([[1.], [2.], [3.]]))
    assert F.allclose(g.edata['h'], F.tensor([[1., 2.], [1., 2.],
                                              [2., 3.], [2., 3.]]))

@parametrize_dtype
def test_apply_nodes(idtype):
    def _upd(nodes):
        return {'h' : nodes.data['h'] * 2}
    g = generate_graph(idtype)
    old = g.ndata['h']
    g.apply_nodes(_upd)
    assert F.allclose(old * 2, g.ndata['h'])
    u = F.tensor([0, 3, 4, 6], g.idtype)
    g.apply_nodes(lambda nodes : {'h' : nodes.data['h'] * 0.}, u)
    assert F.allclose(F.gather_row(g.ndata['h'], u), F.zeros((4, D)))

@parametrize_dtype
def test_apply_edges(idtype):
    def _upd(edges):
        return {'w' : edges.data['w'] * 2}
    g = generate_graph(idtype)
    old = g.edata['w']
    g.apply_edges(_upd)
    assert F.allclose(old * 2, g.edata['w'])
    u = F.tensor([0, 0, 0, 4, 5, 6], g.idtype)
    v = F.tensor([1, 2, 3, 9, 9, 9], g.idtype)
    g.apply_edges(lambda edges : {'w' : edges.data['w'] * 0.}, (u, v))
    eid = F.tensor(g.edge_ids(u, v))
    assert F.allclose(F.gather_row(g.edata['w'], eid), F.zeros((6, D)))

@parametrize_dtype
def test_update_routines(idtype):
    g = generate_graph(idtype)

    # send_and_recv
    reduce_msg_shapes.clear()
    u = [0, 0, 0, 4, 5, 6]
    v = [1, 2, 3, 9, 9, 9]
    g.send_and_recv((u, v), message_func, reduce_func, apply_node_func)
    assert(reduce_msg_shapes == {(1, 3, D), (3, 1, D)})
    reduce_msg_shapes.clear()
    try:
        g.send_and_recv([u, v])
        assert False
    except:
        pass

    # pull
    v = F.tensor([1, 2, 3, 9], g.idtype)
    reduce_msg_shapes.clear()
    g.pull(v, message_func, reduce_func, apply_node_func)
    assert(reduce_msg_shapes == {(1, 8, D), (3, 1, D)})
    reduce_msg_shapes.clear()

    # push
    v = F.tensor([0, 1, 2, 3], g.idtype)
    reduce_msg_shapes.clear()
    g.push(v, message_func, reduce_func, apply_node_func)
    assert(reduce_msg_shapes == {(1, 3, D), (8, 1, D)})
    reduce_msg_shapes.clear()

    # update_all
    reduce_msg_shapes.clear()
    g.update_all(message_func, reduce_func, apply_node_func)
    assert(reduce_msg_shapes == {(1, 8, D), (9, 1, D)})
    reduce_msg_shapes.clear()

@parametrize_dtype
def test_update_all_0deg(idtype):
    # test#1
    g = dgl.graph([(1,0), (2,0), (3,0), (4,0)], idtype=idtype, device=F.ctx())
    def _message(edges):
        return {'m' : edges.src['h']}
    def _reduce(nodes):
        return {'x' : nodes.data['h'] + F.sum(nodes.mailbox['m'], 1)}
    def _apply(nodes):
        return {'x' : nodes.data['x'] * 2}
    def _init2(shape, dtype, ctx, ids):
        return 2 + F.zeros(shape, dtype, ctx)
    g.set_n_initializer(_init2, 'x')
    old_repr = F.randn((5, 5))
    g.ndata['h'] = old_repr
    g.update_all(_message, _reduce, _apply)
    new_repr = g.ndata['x']
    # the first row of the new_repr should be the sum of all the node
    # features; while the 0-deg nodes should be initialized by the
    # initializer and applied with UDF.
    assert F.allclose(new_repr[1:], 2*(2+F.zeros((4,5))))
    assert F.allclose(new_repr[0], 2 * F.sum(old_repr, 0))

    # test#2: graph with no edge
    g = dgl.graph([], num_nodes=5, idtype=idtype, device=F.ctx())
    g.ndata['h'] = old_repr
    g.update_all(_message, _reduce, lambda nodes : {'h' : nodes.data['h'] * 2})
    new_repr = g.ndata['h']
    # should fallback to apply
    assert F.allclose(new_repr, 2*old_repr)

@parametrize_dtype
def test_pull_0deg(idtype):
    g = dgl.graph([(0,1)], idtype=idtype, device=F.ctx())
    def _message(edges):
        return {'m' : edges.src['h']}
    def _reduce(nodes):
        return {'x' : nodes.data['h'] + F.sum(nodes.mailbox['m'], 1)}
    def _apply(nodes):
        return {'x' : nodes.data['x'] * 2}
    def _init2(shape, dtype, ctx, ids):
        return 2 + F.zeros(shape, dtype, ctx)
    g.set_n_initializer(_init2, 'x')
    # test#1: pull both 0deg and non-0deg nodes
    old = F.randn((2, 5))
    g.ndata['h'] = old
    g.pull([0, 1], _message, _reduce, _apply)
    new = g.ndata['x']
    # 0deg check: initialized with the func and got applied
    assert F.allclose(new[0], F.full_1d(5, 4, dtype=F.float32))
    # non-0deg check
    assert F.allclose(new[1], F.sum(old, 0) * 2)

    # test#2: pull only 0deg node
    old = F.randn((2, 5))
    g.ndata['h'] = old
    g.pull(0, _message, _reduce, lambda nodes : {'h' : nodes.data['h'] * 2})
    new = g.ndata['h']
    # 0deg check: fallback to apply
    assert F.allclose(new[0], 2*old[0])
    # non-0deg check: not touched
    assert F.allclose(new[1], old[1])

def test_dynamic_addition():
    N = 3
    D = 1
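    # This D shadows the module-level D = 5; the test uses 1-dim features on a tiny graph.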

    g = DGLGraph()
    g = g.to(F.ctx())

    # Test node addition
    g.add_nodes(N)
    g.ndata.update({'h1': F.randn((N, D)),
                    'h2': F.randn((N, D))})
    g.add_nodes(3)
    assert g.ndata['h1'].shape[0] == g.ndata['h2'].shape[0] == N + 3

    # Test edge addition
    g.add_edge(0, 1)
    g.add_edge(1, 0)
    g.edata.update({'h1': F.randn((2, D)),
                    'h2': F.randn((2, D))})
    assert g.edata['h1'].shape[0] == g.edata['h2'].shape[0] == 2

    g.add_edges([0, 2], [2, 0])
    g.edata['h1'] = F.randn((4, D))
    assert g.edata['h1'].shape[0] == g.edata['h2'].shape[0] == 4

    g.add_edge(1, 2)
    g.edges[4].data['h1'] = F.randn((1, D))
    assert g.edata['h1'].shape[0] == g.edata['h2'].shape[0] == 5

    # test add edge with part of the features
    g.add_edge(2, 1, {'h1': F.randn((1, D))})
    assert len(g.edata['h1']) == len(g.edata['h2'])

@parametrize_dtype
def test_repr(idtype):
    g = dgl.graph([(0,1), (0,2), (1,2)], num_nodes=10, idtype=idtype, device=F.ctx())
    repr_string = g.__repr__()
    print(repr_string)
    g.ndata['x'] = F.zeros((10, 5))
    g.edata['y'] = F.zeros((3, 4))
    repr_string = g.__repr__()
    print(repr_string)

@parametrize_dtype
def test_local_var(idtype):
    g = dgl.graph([(0,1), (1,2), (2,3), (3,4)], idtype=idtype, device=F.ctx())
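    # The checks below verify that writes made through g.local_var() inside foo()
    # do not leak back into the original graph's ndata/edata.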
    g.ndata['h'] = F.zeros((g.number_of_nodes(), 3))
    g.edata['w'] = F.zeros((g.number_of_edges(), 4))
    # test override
    def foo(g):
        g = g.local_var()
        g.ndata['h'] = F.ones((g.number_of_nodes(), 3))
        g.edata['w'] = F.ones((g.number_of_edges(), 4))
    foo(g)
    assert F.allclose(g.ndata['h'], F.zeros((g.number_of_nodes(), 3)))
    assert F.allclose(g.edata['w'], F.zeros((g.number_of_edges(), 4)))
    # test out-place update
    def foo(g):
        g = g.local_var()
        g.nodes[[2, 3]].data['h'] = F.ones((2, 3))
        g.edges[[2, 3]].data['w'] = F.ones((2, 4))
    foo(g)
    assert F.allclose(g.ndata['h'], F.zeros((g.number_of_nodes(), 3)))
    assert F.allclose(g.edata['w'], F.zeros((g.number_of_edges(), 4)))
    # test out-place update 2
    def foo(g):
        g = g.local_var()
        g.apply_nodes(lambda nodes: {'h' : nodes.data['h'] + 10}, [2, 3])
        g.apply_edges(lambda edges: {'w' : edges.data['w'] + 10}, [2, 3])
    foo(g)
    assert F.allclose(g.ndata['h'], F.zeros((g.number_of_nodes(), 3)))
    assert F.allclose(g.edata['w'], F.zeros((g.number_of_edges(), 4)))
    # test auto-pop
    def foo(g):
        g = g.local_var()
        g.ndata['hh'] = F.ones((g.number_of_nodes(), 3))
        g.edata['ww'] = F.ones((g.number_of_edges(), 4))
    foo(g)
    assert 'hh' not in g.ndata
    assert 'ww' not in g.edata

    # test initializer1
    g = dgl.graph([(0,1), (1,1)], idtype=idtype, device=F.ctx())
    g.set_n_initializer(dgl.init.zero_initializer)
    def foo(g):
        g = g.local_var()
        g.nodes[0].data['h'] = F.ones((1, 1))
        assert F.allclose(g.ndata['h'], F.tensor([[1.], [0.]]))
    foo(g)
    # test initializer2
    def foo_e_initializer(shape, dtype, ctx, id_range):
        return F.ones(shape)
    g.set_e_initializer(foo_e_initializer, field='h')
    def foo(g):
        g = g.local_var()
        g.edges[0, 1].data['h'] = F.ones((1, 1))
        assert F.allclose(g.edata['h'], F.ones((2, 1)))
        g.edges[0, 1].data['w'] = F.ones((1, 1))
        assert F.allclose(g.edata['w'], F.tensor([[1.], [0.]]))
    foo(g)

@parametrize_dtype
def test_local_scope(idtype):
    g = dgl.graph([(0,1), (1,2), (2,3), (3,4)], idtype=idtype, device=F.ctx())
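    # Same isolation checks as test_local_var, but through the local_scope()
    # context manager, including one nested scope.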
    g.ndata['h'] = F.zeros((g.number_of_nodes(), 3))
    g.edata['w'] = F.zeros((g.number_of_edges(), 4))
    # test override
    def foo(g):
        with g.local_scope():
            g.ndata['h'] = F.ones((g.number_of_nodes(), 3))
            g.edata['w'] = F.ones((g.number_of_edges(), 4))
    foo(g)
    assert F.allclose(g.ndata['h'], F.zeros((g.number_of_nodes(), 3)))
    assert F.allclose(g.edata['w'], F.zeros((g.number_of_edges(), 4)))
    # test out-place update
    def foo(g):
        with g.local_scope():
            g.nodes[[2, 3]].data['h'] = F.ones((2, 3))
            g.edges[[2, 3]].data['w'] = F.ones((2, 4))
    foo(g)
    assert F.allclose(g.ndata['h'], F.zeros((g.number_of_nodes(), 3)))
    assert F.allclose(g.edata['w'], F.zeros((g.number_of_edges(), 4)))
    # test out-place update 2
    def foo(g):
        with g.local_scope():
            g.apply_nodes(lambda nodes: {'h' : nodes.data['h'] + 10}, [2, 3])
            g.apply_edges(lambda edges: {'w' : edges.data['w'] + 10}, [2, 3])
    foo(g)
    assert F.allclose(g.ndata['h'], F.zeros((g.number_of_nodes(), 3)))
    assert F.allclose(g.edata['w'], F.zeros((g.number_of_edges(), 4)))
    # test auto-pop
    def foo(g):
        with g.local_scope():
            g.ndata['hh'] = F.ones((g.number_of_nodes(), 3))
            g.edata['ww'] = F.ones((g.number_of_edges(), 4))
    foo(g)
    assert 'hh' not in g.ndata
    assert 'ww' not in g.edata

    # test nested scope
    def foo(g):
        with g.local_scope():
            g.ndata['hh'] = F.ones((g.number_of_nodes(), 3))
            g.edata['ww'] = F.ones((g.number_of_edges(), 4))
            with g.local_scope():
                g.ndata['hhh'] = F.ones((g.number_of_nodes(), 3))
                g.edata['www'] = F.ones((g.number_of_edges(), 4))
            assert 'hhh' not in g.ndata
            assert 'www' not in g.edata
    foo(g)
    assert 'hh' not in g.ndata
    assert 'ww' not in g.edata

    # test initializer1
    g = dgl.graph([(0,1), (1,1)], idtype=idtype, device=F.ctx())
    g.set_n_initializer(dgl.init.zero_initializer)
    def foo(g):
        with g.local_scope():
            g.nodes[0].data['h'] = F.ones((1, 1))
            assert F.allclose(g.ndata['h'], F.tensor([[1.], [0.]]))
    foo(g)
    # test initializer2
    def foo_e_initializer(shape, dtype, ctx, id_range):
        return F.ones(shape)
    g.set_e_initializer(foo_e_initializer, field='h')
    def foo(g):
        with g.local_scope():
            g.edges[0, 1].data['h'] = F.ones((1, 1))
            assert F.allclose(g.edata['h'], F.ones((2, 1)))
            g.edges[0, 1].data['w'] = F.ones((1, 1))
            assert F.allclose(g.edata['w'], F.tensor([[1.], [0.]]))
    foo(g)

@parametrize_dtype
def test_isolated_nodes(idtype):
    g = dgl.graph([(0, 1), (1, 2)], num_nodes=5, idtype=idtype, device=F.ctx())
    assert g.number_of_nodes() == 5

    # Test backward compatibility
    g = dgl.graph([(0, 1), (1, 2)], card=5, idtype=idtype, device=F.ctx())
    assert g.number_of_nodes() == 5

    g = dgl.bipartite([(0, 2), (0, 3), (1, 2)], 'user', 'plays',
                      'game', num_nodes=(5, 7), idtype=idtype, device=F.ctx())
    assert g.idtype == idtype
    assert g.number_of_nodes('user') == 5
    assert g.number_of_nodes('game') == 7

    # Test backward compatibility
    g = dgl.bipartite([(0, 2), (0, 3), (1, 2)], 'user', 'plays',
                      'game', card=(5, 7), idtype=idtype, device=F.ctx())
    assert g.idtype == idtype
    assert g.number_of_nodes('user') == 5
    assert g.number_of_nodes('game') == 7

@parametrize_dtype
def test_send_multigraph(idtype):
    g = dgl.graph([(0,1), (0,1), (0,1), (2,1)], idtype=idtype, device=F.ctx())
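    # Three parallel edges from node 0 to node 1 plus one (2, 1) edge; the
    # send_and_recv call below addresses them by edge id.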

    def _message_a(edges):
        return {'a': edges.data['a']}
    def _message_b(edges):
        return {'a': edges.data['a'] * 3}
    def _reduce(nodes):
        return {'a': F.max(nodes.mailbox['a'], 1)}

    def answer(*args):
        return F.max(F.stack(args, 0), 0)

    assert g.is_multigraph

    # send by eid
    old_repr = F.randn((4, 5))
    # send_and_recv_on
    g.ndata['a'] = F.zeros((3, 5))
    g.edata['a'] = old_repr
    g.send_and_recv([0, 2, 3], message_func=_message_a, reduce_func=_reduce)
    new_repr = g.ndata['a']
    assert F.allclose(new_repr[1], answer(old_repr[0], old_repr[2], old_repr[3]))
    assert F.allclose(new_repr[[0, 2]], F.zeros((2, 5)))

@parametrize_dtype
def test_issue_1088(idtype):
    # This test ensures that message passing on a heterograph with one edge type
    # would not crash (GitHub issue #1088).
    import dgl.function as fn
    g = dgl.heterograph({('U', 'E', 'V'): ([0, 1, 2], [1, 2, 3])}, idtype=idtype, device=F.ctx())
    g.nodes['U'].data['x'] = F.randn((3, 3))
    g.update_all(fn.copy_u('x', 'm'), fn.sum('m', 'y'))