test_nn.py 32.3 KB
Newer Older
1
2
3
4
import torch as th
import networkx as nx
import dgl
import dgl.nn.pytorch as nn
5
import dgl.function as fn
6
import backend as F
7
import pytest
8
9
from test_utils.graph_cases import get_cases, random_graph, random_bipartite, random_dglgraph
from test_utils import parametrize_dtype
10
11
from copy import deepcopy

12
13
import scipy as sp

14
15
16
17
18
def _AXWb(A, X, W, b):
    X = th.matmul(X, W)
    Y = th.matmul(A, X.view(X.shape[0], -1)).view_as(X)
    return Y + b

19
20
def test_graph_conv0():
    """GraphConv forward on a small path graph.

    Checks: (1) norm='none' matches the dense reference _AXWb, (2) the layer
    leaves g.ndata/g.edata untouched, (3) multi-dim features work, and
    (4) reset_parameters actually re-initializes the weight.

    Fix: the default-norm block (test#3/test#4) appeared twice verbatim in the
    original; the duplicate added no coverage and was removed.
    """
    g = dgl.DGLGraph(nx.path_graph(3)).to(F.ctx())
    ctx = F.ctx()
    adj = g.adjacency_matrix(transpose=False, ctx=ctx)

    conv = nn.GraphConv(5, 2, norm='none', bias=True)
    conv = conv.to(ctx)
    print(conv)
    # test#1: basic
    h0 = F.ones((3, 5))
    h1 = conv(g, h0)
    # the layer must not leak features into the graph
    assert len(g.ndata) == 0
    assert len(g.edata) == 0
    assert F.allclose(h1, _AXWb(adj, h0, conv.weight, conv.bias))
    # test#2: more-dim
    h0 = F.ones((3, 5, 5))
    h1 = conv(g, h0)
    assert len(g.ndata) == 0
    assert len(g.edata) == 0
    assert F.allclose(h1, _AXWb(adj, h0, conv.weight, conv.bias))

    conv = nn.GraphConv(5, 2)
    conv = conv.to(ctx)
    # test#3: basic, default norm (no dense reference; just no-leak checks)
    h0 = F.ones((3, 5))
    h1 = conv(g, h0)
    assert len(g.ndata) == 0
    assert len(g.edata) == 0
    # test#4: more-dim
    h0 = F.ones((3, 5, 5))
    h1 = conv(g, h0)
    assert len(g.ndata) == 0
    assert len(g.edata) == 0

    # test reset_parameters
    old_weight = deepcopy(conv.weight.data)
    conv.reset_parameters()
    new_weight = conv.weight.data
    assert not F.allclose(old_weight, new_weight)
71

72
73
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'bipartite'], exclude=['zero-degree', 'dglgraph']))
@pytest.mark.parametrize('norm', ['none', 'both', 'right'])
@pytest.mark.parametrize('weight', [True, False])
@pytest.mark.parametrize('bias', [True, False])
def test_graph_conv(idtype, g, norm, weight, bias):
    """GraphConv forward with a single feature tensor; output-shape check."""
    dev = F.ctx()
    g = g.astype(idtype).to(dev)
    conv = nn.GraphConv(5, 2, norm=norm, weight=weight, bias=bias).to(dev)
    ext_w = F.randn((5, 2)).to(dev)
    n_src = g.number_of_src_nodes()
    n_dst = g.number_of_dst_nodes()
    h = F.randn((n_src, 5)).to(dev)
    # use the layer's own weight when it has one, else pass one externally
    h_out = conv(g, h) if weight else conv(g, h, weight=ext_w)
    assert h_out.shape == (n_dst, 2)

91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite'], exclude=['zero-degree', 'dglgraph']))
@pytest.mark.parametrize('norm', ['none', 'both', 'right'])
@pytest.mark.parametrize('weight', [True, False])
@pytest.mark.parametrize('bias', [True, False])
def test_graph_conv_bi(idtype, g, norm, weight, bias):
    """GraphConv forward with a (src, dst) feature pair; output-shape check."""
    dev = F.ctx()
    g = g.astype(idtype).to(dev)
    conv = nn.GraphConv(5, 2, norm=norm, weight=weight, bias=bias).to(dev)
    ext_w = F.randn((5, 2)).to(dev)
    n_dst = g.number_of_dst_nodes()
    h_src = F.randn((g.number_of_src_nodes(), 5)).to(dev)
    h_dst = F.randn((n_dst, 2)).to(dev)
    pair = (h_src, h_dst)
    h_out = conv(g, pair) if weight else conv(g, pair, weight=ext_w)
    assert h_out.shape == (n_dst, 2)
110

111
112
113
114
115
116
117
118
119
120
121
122
def _S2AXWb(A, N, X, W, b):
    X1 = X * N
    X1 = th.matmul(A, X1.view(X1.shape[0], -1))
    X1 = X1 * N
    X2 = X1 * N
    X2 = th.matmul(A, X2.view(X2.shape[0], -1))
    X2 = X2 * N
    X = th.cat([X, X1, X2], dim=-1)
    Y = th.matmul(X, W.rot90())

    return Y + b

123
def test_tagconv():
    """TAGConv forward against the dense reference _S2AXWb, plus reset_parameters."""
    g = dgl.DGLGraph(nx.path_graph(3))
    g = g.to(F.ctx())
    ctx = F.ctx()
    adj = g.adjacency_matrix(transpose=False, ctx=ctx)
    # symmetric normalization factor D^{-1/2} per node
    norm = th.pow(g.in_degrees().float(), -0.5)

    conv = nn.TAGConv(5, 2, bias=True)
    conv = conv.to(ctx)
    print(conv)

    # test#1: basic
    h0 = F.ones((3, 5))
    h1 = conv(g, h0)
    # the layer must not leak features into the graph
    assert len(g.ndata) == 0
    assert len(g.edata) == 0
    # broadcast the per-node norm over the feature dims of h0
    shp = norm.shape + (1,) * (h0.dim() - 1)
    norm = th.reshape(norm, shp).to(ctx)
    # compare against the dense 2-hop reference using the layer's own weights
    assert F.allclose(h1, _S2AXWb(adj, norm, h0, conv.lin.weight, conv.lin.bias))

    conv = nn.TAGConv(5, 2)
    conv = conv.to(ctx)

    # test#2: basic
    h0 = F.ones((3, 5))
    h1 = conv(g, h0)
    assert h1.shape[-1] == 2

    # test reset_parameters
    old_weight = deepcopy(conv.lin.weight.data)
    conv.reset_parameters()
    new_weight = conv.lin.weight.data
    assert not F.allclose(old_weight, new_weight)

158
def test_set2set():
    """Set2Set readout on a single graph and on a batched graph (shape checks)."""
    ctx = F.ctx()
    g = dgl.DGLGraph(nx.path_graph(10))
    g = g.to(F.ctx())

    readout = nn.Set2Set(5, 3, 3).to(ctx)  # hidden size 5, 3 iters, 3 layers
    print(readout)

    # single graph: output is (1, 2 * hidden)
    feat = F.randn((g.number_of_nodes(), 5))
    out = readout(g, feat)
    assert out.shape[0] == 1 and out.shape[1] == 10 and out.dim() == 2

    # batched graph: one row per member graph
    g1 = dgl.DGLGraph(nx.path_graph(11)).to(F.ctx())
    g2 = dgl.DGLGraph(nx.path_graph(5)).to(F.ctx())
    bg = dgl.batch([g, g1, g2])
    feat = F.randn((bg.number_of_nodes(), 5))
    out = readout(bg, feat)
    assert out.shape[0] == 3 and out.shape[1] == 10 and out.dim() == 2

def test_glob_att_pool():
    """GlobalAttentionPooling on a single and a batched graph (shape checks)."""
    ctx = F.ctx()
    g = dgl.DGLGraph(nx.path_graph(10))
    g = g.to(F.ctx())

    # gate network maps 5 -> 1, feature network maps 5 -> 10
    pool = nn.GlobalAttentionPooling(th.nn.Linear(5, 1), th.nn.Linear(5, 10)).to(ctx)
    print(pool)

    # single graph: one pooled row
    feat = F.randn((g.number_of_nodes(), 5))
    out = pool(g, feat)
    assert out.shape[0] == 1 and out.shape[1] == 10 and out.dim() == 2

    # batched graph: one pooled row per member graph
    bg = dgl.batch([g, g, g, g])
    feat = F.randn((bg.number_of_nodes(), 5))
    out = pool(bg, feat)
    assert out.shape[0] == 4 and out.shape[1] == 10 and out.dim() == 2

def test_simple_pool():
    """Sum/Avg/Max/Sort pooling on a single graph and on a batched graph."""
    ctx = F.ctx()
    g = dgl.DGLGraph(nx.path_graph(15))
    g = g.to(F.ctx())

    sum_pool = nn.SumPooling()
    avg_pool = nn.AvgPooling()
    max_pool = nn.MaxPooling()
    sort_pool = nn.SortPooling(10) # k = 10
    print(sum_pool, avg_pool, max_pool, sort_pool)

    # test#1: basic
    h0 = F.randn((g.number_of_nodes(), 5))
    sum_pool = sum_pool.to(ctx)
    avg_pool = avg_pool.to(ctx)
    max_pool = max_pool.to(ctx)
    sort_pool = sort_pool.to(ctx)
    h1 = sum_pool(g, h0)
    assert F.allclose(F.squeeze(h1, 0), F.sum(h0, 0))
    h1 = avg_pool(g, h0)
    assert F.allclose(F.squeeze(h1, 0), F.mean(h0, 0))
    h1 = max_pool(g, h0)
    assert F.allclose(F.squeeze(h1, 0), F.max(h0, 0))
    h1 = sort_pool(g, h0)
    # sort pooling flattens the top-k node features: (1, k * feat_dim)
    assert h1.shape[0] == 1 and h1.shape[1] == 10 * 5 and h1.dim() == 2

    # test#2: batched graph
    # batch node counts are [15, 5, 15, 5, 15]; the slice boundaries below
    # (15, 20, 35, 40, 55) are the cumulative offsets of that batch
    g_ = dgl.DGLGraph(nx.path_graph(5)).to(F.ctx())
    bg = dgl.batch([g, g_, g, g_, g])
    h0 = F.randn((bg.number_of_nodes(), 5))
    h1 = sum_pool(bg, h0)
    truth = th.stack([F.sum(h0[:15], 0),
                      F.sum(h0[15:20], 0),
                      F.sum(h0[20:35], 0),
                      F.sum(h0[35:40], 0),
                      F.sum(h0[40:55], 0)], 0)
    assert F.allclose(h1, truth)

    h1 = avg_pool(bg, h0)
    truth = th.stack([F.mean(h0[:15], 0),
                      F.mean(h0[15:20], 0),
                      F.mean(h0[20:35], 0),
                      F.mean(h0[35:40], 0),
                      F.mean(h0[40:55], 0)], 0)
    assert F.allclose(h1, truth)

    h1 = max_pool(bg, h0)
    truth = th.stack([F.max(h0[:15], 0),
                      F.max(h0[15:20], 0),
                      F.max(h0[20:35], 0),
                      F.max(h0[35:40], 0),
                      F.max(h0[40:55], 0)], 0)
    assert F.allclose(h1, truth)

    h1 = sort_pool(bg, h0)
    assert h1.shape[0] == 5 and h1.shape[1] == 10 * 5 and h1.dim() == 2

def test_set_trans():
    """SetTransformer encoder ('sab' and 'isab') and decoder, single and batched.

    Fix: unlike every other test in this file, the graphs were never moved to
    F.ctx(); on a GPU backend the forward passes would fail with a device
    mismatch. All graphs are now placed on the backend context.
    """
    ctx = F.ctx()
    g = dgl.DGLGraph(nx.path_graph(15)).to(ctx)

    st_enc_0 = nn.SetTransformerEncoder(50, 5, 10, 100, 2, 'sab')
    st_enc_1 = nn.SetTransformerEncoder(50, 5, 10, 100, 2, 'isab', 3)
    st_dec = nn.SetTransformerDecoder(50, 5, 10, 100, 2, 4)
    st_enc_0 = st_enc_0.to(ctx)
    st_enc_1 = st_enc_1.to(ctx)
    st_dec = st_dec.to(ctx)
    print(st_enc_0, st_enc_1, st_dec)

    # test#1: basic — encoders preserve the feature shape
    h0 = F.randn((g.number_of_nodes(), 50))
    h1 = st_enc_0(g, h0)
    assert h1.shape == h0.shape
    h1 = st_enc_1(g, h0)
    assert h1.shape == h0.shape
    # decoder pools to one row per graph with 4 * 50 = 200 features
    h2 = st_dec(g, h1)
    assert h2.shape[0] == 1 and h2.shape[1] == 200 and h2.dim() == 2

    # test#2: batched graph
    g1 = dgl.DGLGraph(nx.path_graph(5)).to(ctx)
    g2 = dgl.DGLGraph(nx.path_graph(10)).to(ctx)
    bg = dgl.batch([g, g1, g2])
    h0 = F.randn((bg.number_of_nodes(), 50))
    h1 = st_enc_0(bg, h0)
    assert h1.shape == h0.shape
    h1 = st_enc_1(bg, h0)
    assert h1.shape == h0.shape

    h2 = st_dec(bg, h1)
    assert h2.shape[0] == 3 and h2.shape[1] == 200 and h2.dim() == 2

Minjie Wang's avatar
Minjie Wang committed
291
292
293
294
def test_rgcn():
    """RelGraphConv: low_mem=True must match the regular implementation.

    For each decomposition ("basis" and "bdd"), with and without an edge norm,
    and with both feature input and node-id input, the low-memory layer is
    given the exact same parameters as the regular layer and the outputs are
    compared elementwise.
    """
    ctx = F.ctx()
    etype = []
    g = dgl.DGLGraph(sp.sparse.random(100, 100, density=0.1), readonly=True)
    g = g.to(F.ctx())
    # 5 etypes
    R = 5
    # assign edge types round-robin over the 5 relations
    for i in range(g.number_of_edges()):
        etype.append(i % 5)
    B = 2   # number of bases / blocks
    I = 10  # input feature size
    O = 8   # output feature size

    # basis decomposition: share all parameters so outputs must agree
    rgc_basis = nn.RelGraphConv(I, O, R, "basis", B).to(ctx)
    rgc_basis_low = nn.RelGraphConv(I, O, R, "basis", B, low_mem=True).to(ctx)
    rgc_basis_low.weight = rgc_basis.weight
    rgc_basis_low.w_comp = rgc_basis.w_comp
    rgc_basis_low.loop_weight = rgc_basis.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = th.tensor(etype).to(ctx)
    h_new = rgc_basis(g, h, r)
    h_new_low = rgc_basis_low(g, h, r)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    # block-diagonal decomposition
    rgc_bdd = nn.RelGraphConv(I, O, R, "bdd", B).to(ctx)
    rgc_bdd_low = nn.RelGraphConv(I, O, R, "bdd", B, low_mem=True).to(ctx)
    rgc_bdd_low.weight = rgc_bdd.weight
    rgc_bdd_low.loop_weight = rgc_bdd.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = th.tensor(etype).to(ctx)
    h_new = rgc_bdd(g, h, r)
    h_new_low = rgc_bdd_low(g, h, r)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    # with norm: a per-edge scalar normalizer
    norm = th.rand((g.number_of_edges(), 1)).to(ctx)

    rgc_basis = nn.RelGraphConv(I, O, R, "basis", B).to(ctx)
    rgc_basis_low = nn.RelGraphConv(I, O, R, "basis", B, low_mem=True).to(ctx)
    rgc_basis_low.weight = rgc_basis.weight
    rgc_basis_low.w_comp = rgc_basis.w_comp
    rgc_basis_low.loop_weight = rgc_basis.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = th.tensor(etype).to(ctx)
    h_new = rgc_basis(g, h, r, norm)
    h_new_low = rgc_basis_low(g, h, r, norm)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    rgc_bdd = nn.RelGraphConv(I, O, R, "bdd", B).to(ctx)
    rgc_bdd_low = nn.RelGraphConv(I, O, R, "bdd", B, low_mem=True).to(ctx)
    rgc_bdd_low.weight = rgc_bdd.weight
    rgc_bdd_low.loop_weight = rgc_bdd.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = th.tensor(etype).to(ctx)
    h_new = rgc_bdd(g, h, r, norm)
    h_new_low = rgc_bdd_low(g, h, r, norm)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    # id input: integer node ids instead of a feature matrix
    rgc_basis = nn.RelGraphConv(I, O, R, "basis", B).to(ctx)
    rgc_basis_low = nn.RelGraphConv(I, O, R, "basis", B, low_mem=True).to(ctx)
    rgc_basis_low.weight = rgc_basis.weight
    rgc_basis_low.w_comp = rgc_basis.w_comp
    rgc_basis_low.loop_weight = rgc_basis.loop_weight
    h = th.randint(0, I, (100,)).to(ctx)
    r = th.tensor(etype).to(ctx)
    h_new = rgc_basis(g, h, r)
    h_new_low = rgc_basis_low(g, h, r)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)
370

371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409

def test_rgcn_sorted():
    """RelGraphConv low_mem parity when edge types are given in sorted form.

    Same structure as test_rgcn, but `r` is a plain Python list instead of a
    tensor of per-edge types.
    """
    ctx = F.ctx()
    etype = []
    g = dgl.DGLGraph(sp.sparse.random(100, 100, density=0.1), readonly=True)
    g = g.to(F.ctx())
    # 5 etypes
    R = 5
    # NOTE(review): this looks like the sorted-etype fast path where the list
    # gives per-relation edge counts (5 * 200 = 1000 edges for the 0.1-density
    # 100x100 graph) — confirm against RelGraphConv's sorted-etype contract.
    etype = [200, 200, 200, 200, 200]
    B = 2   # number of bases / blocks
    I = 10  # input feature size
    O = 8   # output feature size

    # basis decomposition: share all parameters so outputs must agree
    rgc_basis = nn.RelGraphConv(I, O, R, "basis", B).to(ctx)
    rgc_basis_low = nn.RelGraphConv(I, O, R, "basis", B, low_mem=True).to(ctx)
    rgc_basis_low.weight = rgc_basis.weight
    rgc_basis_low.w_comp = rgc_basis.w_comp
    rgc_basis_low.loop_weight = rgc_basis.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = etype
    h_new = rgc_basis(g, h, r)
    h_new_low = rgc_basis_low(g, h, r)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    # block-diagonal decomposition
    rgc_bdd = nn.RelGraphConv(I, O, R, "bdd", B).to(ctx)
    rgc_bdd_low = nn.RelGraphConv(I, O, R, "bdd", B, low_mem=True).to(ctx)
    rgc_bdd_low.weight = rgc_bdd.weight
    rgc_bdd_low.loop_weight = rgc_bdd.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = etype
    h_new = rgc_bdd(g, h, r)
    h_new_low = rgc_bdd_low(g, h, r)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    # with norm: a per-edge scalar normalizer
    norm = th.rand((g.number_of_edges(), 1)).to(ctx)

    rgc_basis = nn.RelGraphConv(I, O, R, "basis", B).to(ctx)
    rgc_basis_low = nn.RelGraphConv(I, O, R, "basis", B, low_mem=True).to(ctx)
    rgc_basis_low.weight = rgc_basis.weight
    rgc_basis_low.w_comp = rgc_basis.w_comp
    rgc_basis_low.loop_weight = rgc_basis.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = etype
    h_new = rgc_basis(g, h, r, norm)
    h_new_low = rgc_basis_low(g, h, r, norm)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    rgc_bdd = nn.RelGraphConv(I, O, R, "bdd", B).to(ctx)
    rgc_bdd_low = nn.RelGraphConv(I, O, R, "bdd", B, low_mem=True).to(ctx)
    rgc_bdd_low.weight = rgc_bdd.weight
    rgc_bdd_low.loop_weight = rgc_bdd.loop_weight
    h = th.randn((100, I)).to(ctx)
    r = etype
    h_new = rgc_bdd(g, h, r, norm)
    h_new_low = rgc_bdd_low(g, h, r, norm)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)

    # id input: integer node ids instead of a feature matrix
    rgc_basis = nn.RelGraphConv(I, O, R, "basis", B).to(ctx)
    rgc_basis_low = nn.RelGraphConv(I, O, R, "basis", B, low_mem=True).to(ctx)
    rgc_basis_low.weight = rgc_basis.weight
    rgc_basis_low.w_comp = rgc_basis.w_comp
    rgc_basis_low.loop_weight = rgc_basis.loop_weight
    h = th.randint(0, I, (100,)).to(ctx)
    r = etype
    h_new = rgc_basis(g, h, r)
    h_new_low = rgc_basis_low(g, h, r)
    assert list(h_new.shape) == [100, O]
    assert list(h_new_low.shape) == [100, O]
    assert F.allclose(h_new, h_new_low)


452
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'block-bipartite'], exclude=['zero-degree']))
def test_gat_conv(g, idtype):
    """GATConv with 4 heads; checks output and attention shapes."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.GATConv(5, 2, 4).to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat)
    # one 2-d vector per head per node
    assert out.shape == (g.number_of_nodes(), 4, 2)
    # one attention scalar per head per edge
    _, attn = layer(g, feat, get_attention=True)
    assert attn.shape == (g.number_of_edges(), 4, 1)
464

465
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite'], exclude=['zero-degree']))
def test_gat_conv_bi(g, idtype):
    """GATConv on a bipartite graph with a (src, dst) feature pair."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.GATConv(5, 2, 4).to(ctx)
    pair = (F.randn((g.number_of_src_nodes(), 5)),
            F.randn((g.number_of_dst_nodes(), 5)))
    out = layer(g, pair)
    assert out.shape == (g.number_of_dst_nodes(), 4, 2)
    _, attn = layer(g, pair, get_attention=True)
    assert attn.shape == (g.number_of_edges(), 4, 1)
477

478
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'block-bipartite']))
@pytest.mark.parametrize('aggre_type', ['mean', 'pool', 'gcn', 'lstm'])
def test_sage_conv(idtype, g, aggre_type):
    """SAGEConv forward for every aggregator; output-width check."""
    g = g.astype(idtype).to(F.ctx())
    layer = nn.SAGEConv(5, 10, aggre_type).to(F.ctx())
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat)
    assert out.shape[-1] == 10

489
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite']))
@pytest.mark.parametrize('aggre_type', ['mean', 'pool', 'gcn', 'lstm'])
def test_sage_conv_bi(idtype, g, aggre_type):
    """Bipartite SAGEConv; 'gcn' aggregation needs matching src/dst widths."""
    g = g.astype(idtype).to(F.ctx())
    dst_dim = 5 if aggre_type != 'gcn' else 10
    layer = nn.SAGEConv((10, dst_dim), 2, aggre_type).to(F.ctx())
    pair = (F.randn((g.number_of_src_nodes(), 10)),
            F.randn((g.number_of_dst_nodes(), dst_dim)))
    out = layer(g, pair)
    assert out.shape[-1] == 2
    assert out.shape[0] == g.number_of_dst_nodes()
501

502
503
504
@parametrize_dtype
def test_sage_conv2(idtype):
    # TODO: add test for blocks
    """SAGEConv on a bipartite graph with no edges at all."""
    ctx = F.ctx()
    g = dgl.heterograph({('_U', '_E', '_V'): ([], [])}, {'_U': 5, '_V': 3})
    g = g.astype(idtype).to(F.ctx())
    layer = nn.SAGEConv((3, 3), 2, 'gcn').to(ctx)
    pair = (F.randn((5, 3)), F.randn((3, 3)))
    out = layer(g, (F.copy_to(pair[0], F.ctx()), F.copy_to(pair[1], F.ctx())))
    assert out.shape[-1] == 2
    assert out.shape[0] == 3
    # the remaining aggregators accept different src/dst feature widths
    for aggre_type in ['mean', 'pool', 'lstm']:
        layer = nn.SAGEConv((3, 1), 2, aggre_type).to(ctx)
        pair = (F.randn((5, 3)), F.randn((3, 1)))
        out = layer(g, pair)
        assert out.shape[-1] == 2
        assert out.shape[0] == 3

523
524
525
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo'], exclude=['zero-degree']))
def test_sgc_conv(g, idtype):
    """SGConv with and without caching; a cached layer ignores later feature changes."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)

    # not cached
    layer = nn.SGConv(5, 10, 3).to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat)
    assert out.shape[-1] == 10

    # cached: the second call must reuse the first call's propagation
    layer = nn.SGConv(5, 10, 3, True).to(ctx)
    first = layer(g, feat)
    second = layer(g, feat + 1)
    assert F.allclose(first, second)
    assert first.shape[-1] == 10

544
545
546
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo'], exclude=['zero-degree']))
def test_appnp_conv(g, idtype):
    """APPNPConv (10 steps, alpha=0.1) keeps the input feature width."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.APPNPConv(10, 0.1).to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat)
    assert out.shape[-1] == 5

556
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'block-bipartite'], exclude=['zero-degree']))
@pytest.mark.parametrize('aggregator_type', ['mean', 'max', 'sum'])
def test_gin_conv(g, idtype, aggregator_type):
    """GINConv with a linear apply function; output must be (N, 12)."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.GINConv(th.nn.Linear(5, 12), aggregator_type).to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat)
    assert out.shape == (g.number_of_nodes(), 12)
570

571
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite'], exclude=['zero-degree']))
@pytest.mark.parametrize('aggregator_type', ['mean', 'max', 'sum'])
def test_gin_conv_bi(g, idtype, aggregator_type):
    """GINConv on a bipartite graph with a (src, dst) feature pair."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.GINConv(th.nn.Linear(5, 12), aggregator_type).to(ctx)
    pair = (F.randn((g.number_of_src_nodes(), 5)),
            F.randn((g.number_of_dst_nodes(), 5)))
    out = layer(g, pair)
    assert out.shape == (g.number_of_dst_nodes(), 12)
585

586
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'block-bipartite'], exclude=['zero-degree']))
def test_agnn_conv(g, idtype):
    """AGNNConv preserves the input feature width."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.AGNNConv(1).to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat)
    assert out.shape == (g.number_of_nodes(), 5)
596

597
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite'], exclude=['zero-degree']))
def test_agnn_conv_bi(g, idtype):
    """AGNNConv on a bipartite graph with a (src, dst) feature pair."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.AGNNConv(1).to(ctx)
    pair = (F.randn((g.number_of_src_nodes(), 5)),
            F.randn((g.number_of_dst_nodes(), 5)))
    out = layer(g, pair)
    assert out.shape == (g.number_of_dst_nodes(), 5)
607

608
609
610
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo'], exclude=['zero-degree']))
def test_gated_graph_conv(g, idtype):
    """GatedGraphConv with 3 edge types; currently we only do shape check."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.GatedGraphConv(5, 10, 5, 3).to(ctx)
    # round-robin edge-type assignment over the 3 types
    etypes = (th.arange(g.number_of_edges()) % 3).to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat, etypes)
    assert out.shape[-1] == 10

623
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'block-bipartite'], exclude=['zero-degree']))
def test_nn_conv(g, idtype):
    """NNConv with a linear edge network; currently we only do shape check."""
    ctx = F.ctx()
    g = g.astype(idtype).to(F.ctx())
    edge_net = th.nn.Linear(4, 5 * 10)
    layer = nn.NNConv(5, 10, edge_net, 'mean').to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    efeat = F.randn((g.number_of_edges(), 4))
    out = layer(g, feat, efeat)
    assert out.shape[-1] == 10

637
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite'], exclude=['zero-degree']))
def test_nn_conv_bi(g, idtype):
    """NNConv on a bipartite graph; currently we only do shape check."""
    ctx = F.ctx()
    g = g.astype(idtype).to(F.ctx())
    edge_net = th.nn.Linear(4, 5 * 10)
    layer = nn.NNConv((5, 2), 10, edge_net, 'mean').to(ctx)
    src_feat = F.randn((g.number_of_src_nodes(), 5))
    dst_feat = F.randn((g.number_of_dst_nodes(), 2))
    efeat = F.randn((g.number_of_edges(), 4))
    out = layer(g, (src_feat, dst_feat), efeat)
    assert out.shape[-1] == 10

652
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo'], exclude=['zero-degree']))
def test_gmm_conv(g, idtype):
    """GMMConv with 3-d pseudo-coordinates and 4 kernels; shape check only."""
    ctx = F.ctx()
    g = g.astype(idtype).to(F.ctx())
    layer = nn.GMMConv(5, 10, 3, 4, 'mean').to(ctx)
    feat = F.randn((g.number_of_nodes(), 5))
    pseudo = F.randn((g.number_of_edges(), 3))
    out = layer(g, feat, pseudo)
    assert out.shape[-1] == 10

665
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite', 'block-bipartite'], exclude=['zero-degree']))
def test_gmm_conv_bi(g, idtype):
    """GMMConv on (block-)bipartite graphs; shape check only."""
    ctx = F.ctx()
    g = g.astype(idtype).to(F.ctx())
    layer = nn.GMMConv((5, 2), 10, 3, 4, 'mean').to(ctx)
    src_feat = F.randn((g.number_of_src_nodes(), 5))
    dst_feat = F.randn((g.number_of_dst_nodes(), 2))
    pseudo = F.randn((g.number_of_edges(), 3))
    out = layer(g, (src_feat, dst_feat), pseudo)
    assert out.shape[-1] == 10

679
@parametrize_dtype
@pytest.mark.parametrize('norm_type', ['both', 'right', 'none'])
@pytest.mark.parametrize('g', get_cases(['homo', 'bipartite'], exclude=['zero-degree']))
def test_dense_graph_conv(norm_type, g, idtype):
    """DenseGraphConv with shared parameters must match sparse GraphConv."""
    ctx = F.ctx()
    g = g.astype(idtype).to(F.ctx())
    # TODO(minjie): enable the following option after #1385
    adj = g.adjacency_matrix(transpose=False, ctx=ctx).to_dense()
    sparse_conv = nn.GraphConv(5, 2, norm=norm_type, bias=True)
    dense_conv = nn.DenseGraphConv(5, 2, norm=norm_type, bias=True)
    # share parameters so both layers compute the same function
    dense_conv.weight.data = sparse_conv.weight.data
    dense_conv.bias.data = sparse_conv.bias.data
    feat = F.randn((g.number_of_src_nodes(), 5))
    sparse_conv = sparse_conv.to(ctx)
    dense_conv = dense_conv.to(ctx)
    out_sparse = sparse_conv(g, feat)
    out_dense = dense_conv(adj, feat)
    assert F.allclose(out_sparse, out_dense)

698
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'bipartite']))
def test_dense_sage_conv(g, idtype):
    """DenseSAGEConv with shared parameters must match SAGEConv('gcn')."""
    ctx = F.ctx()
    g = g.astype(idtype).to(F.ctx())
    adj = g.adjacency_matrix(transpose=False, ctx=ctx).to_dense()
    sage = nn.SAGEConv(5, 2, 'gcn')
    dense_sage = nn.DenseSAGEConv(5, 2)
    # share the neighbor-projection parameters
    dense_sage.fc.weight.data = sage.fc_neigh.weight.data
    dense_sage.fc.bias.data = sage.fc_neigh.bias.data
    if len(g.ntypes) == 2:
        # bipartite case: separate source / destination features
        feat = (
            F.randn((g.number_of_src_nodes(), 5)),
            F.randn((g.number_of_dst_nodes(), 5))
        )
    else:
        feat = F.randn((g.number_of_nodes(), 5))
    sage = sage.to(ctx)
    dense_sage = dense_sage.to(ctx)
    out_sparse = sage(g, feat)
    out_dense = dense_sage(adj, feat)
    assert F.allclose(out_sparse, out_dense), g

721
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo', 'block-bipartite'], exclude=['zero-degree']))
def test_edge_conv(g, idtype):
    """EdgeConv forward: one 2-d output vector per node."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.EdgeConv(5, 2).to(ctx)
    print(layer)
    feat = F.randn((g.number_of_nodes(), 5))
    out = layer(g, feat)
    assert out.shape == (g.number_of_nodes(), 2)
731

732
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['bipartite'], exclude=['zero-degree']))
def test_edge_conv_bi(g, idtype):
    """EdgeConv on a bipartite graph with a (src, dst) feature pair."""
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    layer = nn.EdgeConv(5, 2).to(ctx)
    print(layer)
    src_feat = F.randn((g.number_of_src_nodes(), 5))
    dst_feat = F.randn((g.number_of_dst_nodes(), 5))
    out = layer(g, (src_feat, dst_feat))
    assert out.shape == (g.number_of_dst_nodes(), 2)
743
744
745
746
747

def test_dense_cheb_conv():
    """DenseChebConv with copied parameters must match ChebConv for k = 1..3."""
    for k in range(1, 4):
        ctx = F.ctx()
        g = dgl.DGLGraph(sp.sparse.random(100, 100, density=0.1), readonly=True)
        g = g.to(F.ctx())
        adj = g.adjacency_matrix(transpose=False, ctx=ctx).to_dense()
        cheb = nn.ChebConv(5, 2, k, None)
        dense_cheb = nn.DenseChebConv(5, 2, k)
        #for i in range(len(cheb.fc)):
        #    dense_cheb.W.data[i] = cheb.fc[i].weight.data.t()
        # unfold ChebConv's fused (2, k*5) linear weight into the per-hop
        # (k, 5, 2) dense weight tensor
        dense_cheb.W.data = cheb.linear.weight.data.transpose(-1, -2).view(k, 5, 2)
        if cheb.linear.bias is not None:
            dense_cheb.bias.data = cheb.linear.bias.data
        feat = F.randn((100, 5))
        cheb = cheb.to(ctx)
        dense_cheb = dense_cheb.to(ctx)
        # both layers use the same fixed lambda_max = 2.0
        out_cheb = cheb(g, feat, [2.0])
        out_dense_cheb = dense_cheb(adj, feat, 2.0)
        print(k, out_cheb, out_dense_cheb)
        assert F.allclose(out_cheb, out_dense_cheb)

765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
def test_sequential():
    """nn.Sequential over graph layers: single-graph and graph-per-layer modes."""
    ctx = F.ctx()
    # Test single graph
    class ExampleLayer(th.nn.Module):
        def __init__(self):
            super().__init__()

        def forward(self, graph, n_feat, e_feat):
            # sum-aggregate neighbor features, then derive an edge feature;
            # note the in-place += mutates the incoming tensors
            graph = graph.local_var()
            graph.ndata['h'] = n_feat
            graph.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h'))
            n_feat += graph.ndata['h']
            graph.apply_edges(fn.u_add_v('h', 'h', 'e'))
            e_feat += graph.edata['e']
            return n_feat, e_feat

    g = dgl.DGLGraph()
    g.add_nodes(3)
    g.add_edges([0, 1, 2, 0, 1, 2, 0, 1, 2], [0, 0, 0, 1, 1, 1, 2, 2, 2])
    g = g.to(F.ctx())
    net = nn.Sequential(ExampleLayer(), ExampleLayer(), ExampleLayer())
    n_feat = F.randn((3, 4))
    e_feat = F.randn((9, 4))
    net = net.to(ctx)
    # same graph is threaded through all three layers
    n_feat, e_feat = net(g, n_feat, e_feat)
    assert n_feat.shape == (3, 4)
    assert e_feat.shape == (9, 4)

    # Test multiple graph
    class ExampleLayer(th.nn.Module):
        def __init__(self):
            super().__init__()

        def forward(self, graph, n_feat):
            # each layer halves the number of rows by summing node pairs
            graph = graph.local_var()
            graph.ndata['h'] = n_feat
            graph.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h'))
            n_feat += graph.ndata['h']
            return n_feat.view(graph.number_of_nodes() // 2, 2, -1).sum(1)

    # node counts 32 -> 16 -> 8 match the halving done by each layer
    g1 = dgl.DGLGraph(nx.erdos_renyi_graph(32, 0.05)).to(F.ctx())
    g2 = dgl.DGLGraph(nx.erdos_renyi_graph(16, 0.2)).to(F.ctx())
    g3 = dgl.DGLGraph(nx.erdos_renyi_graph(8, 0.8)).to(F.ctx())
    net = nn.Sequential(ExampleLayer(), ExampleLayer(), ExampleLayer())
    net = net.to(ctx)
    n_feat = F.randn((32, 4))
    # a list of graphs feeds one graph to each layer in order
    n_feat = net([g1, g2, g3], n_feat)
    assert n_feat.shape == (4, 4)

814
815
816
817
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo'], exclude=['zero-degree']))
def test_atomic_conv(g, idtype):
    """Shape-only check of AtomicConv on homogeneous graphs.

    Two interaction cutoffs x two entries in ``features_to_use`` give the
    4 output channels asserted below.
    """
    g = g.astype(idtype).to(F.ctx())
    aconv = nn.AtomicConv(interaction_cutoffs=F.tensor([12.0, 12.0]),
                          rbf_kernel_means=F.tensor([0.0, 2.0]),
                          rbf_kernel_scaling=F.tensor([4.0, 4.0]),
                          features_to_use=F.tensor([6.0, 8.0]))

    ctx = F.ctx()
    if F.gpu_ctx():
        aconv = aconv.to(ctx)

    # One scalar feature per node, one scalar distance per edge.
    feat = F.randn((g.number_of_nodes(), 1))
    dist = F.randn((g.number_of_edges(), 1))

    h = aconv(g, feat, dist)
    # currently we only do shape check
    assert h.shape[-1] == 4
@parametrize_dtype
@pytest.mark.parametrize('g', get_cases(['homo'], exclude=['zero-degree']))
def test_cf_conv(g, idtype):
    """Shape-only check of CFConv (SchNet-style continuous-filter conv)."""
    g = g.astype(idtype).to(F.ctx())
    cfconv = nn.CFConv(node_in_feats=2,
                       edge_in_feats=3,
                       hidden_feats=2,
                       out_feats=3)

    ctx = F.ctx()
    if F.gpu_ctx():
        cfconv = cfconv.to(ctx)

    node_feats = F.randn((g.number_of_nodes(), 2))
    edge_feats = F.randn((g.number_of_edges(), 3))
    h = cfconv(g, node_feats, edge_feats)
    # currently we only do shape check
    assert h.shape[-1] == 3
def myagg(alist, dsttype):
    """Custom cross-relation aggregator: position-weighted sum.

    The i-th entry (0-based) of ``alist`` contributes with weight ``i + 1``;
    ``dsttype`` is accepted but unused (required by the aggregator protocol).
    """
    total = alist[0]
    for weight, feat in enumerate(alist[1:], start=2):
        total = total + weight * feat
    return total
@parametrize_dtype
@pytest.mark.parametrize('agg', ['sum', 'max', 'min', 'mean', 'stack', myagg])
def test_hetero_conv(agg, idtype):
    """HeteroGraphConv: per-relation modules, cross-type aggregation ``agg``.

    Checks output keys/shapes for dict input, pair (src, dst) input, and
    forwarding of per-relation ``mod_args`` / ``mod_kwargs``.
    """
    g = dgl.heterograph({
        ('user', 'follows', 'user'): ([0, 0, 2, 1], [1, 2, 1, 3]),
        ('user', 'plays', 'game'): ([0, 0, 0, 1, 2], [0, 2, 3, 0, 2]),
        ('store', 'sells', 'game'): ([0, 0, 1, 1], [0, 3, 1, 2])},
        idtype=idtype, device=F.ctx())
    conv = nn.HeteroGraphConv({
        'follows': nn.GraphConv(2, 3, allow_zero_in_degree=True),
        'plays': nn.GraphConv(2, 4, allow_zero_in_degree=True),
        'sells': nn.GraphConv(3, 4, allow_zero_in_degree=True)},
        agg)
    conv = conv.to(F.ctx())

    # Features for each node type: 4 users, 4 games, 2 stores.
    uf = F.randn((4, 2))
    gf = F.randn((4, 4))
    sf = F.randn((2, 3))

    # Only 'user' features supplied: 'follows' and 'plays' relations fire.
    h = conv(g, {'user': uf})
    assert set(h.keys()) == {'user', 'game'}
    if agg != 'stack':
        assert h['user'].shape == (4, 3)
        assert h['game'].shape == (4, 4)
    else:
        assert h['user'].shape == (4, 1, 3)
        assert h['game'].shape == (4, 1, 4)

    # 'user' + 'store': 'game' now receives from two relations, so 'stack'
    # produces a size-2 middle dimension.
    h = conv(g, {'user': uf, 'store': sf})
    assert set(h.keys()) == {'user', 'game'}
    if agg != 'stack':
        assert h['user'].shape == (4, 3)
        assert h['game'].shape == (4, 4)
    else:
        assert h['user'].shape == (4, 1, 3)
        assert h['game'].shape == (4, 2, 4)

    # Only 'store': just the 'sells' relation contributes.
    h = conv(g, {'store': sf})
    assert set(h.keys()) == {'game'}
    if agg != 'stack':
        assert h['game'].shape == (4, 4)
    else:
        assert h['game'].shape == (4, 1, 4)

    # test with pair input
    conv = nn.HeteroGraphConv({
        'follows': nn.SAGEConv(2, 3, 'mean'),
        'plays': nn.SAGEConv((2, 4), 4, 'mean'),
        'sells': nn.SAGEConv(3, 4, 'mean')},
        agg)
    conv = conv.to(F.ctx())

    h = conv(g, ({'user': uf}, {'user' : uf, 'game' : gf}))
    assert set(h.keys()) == {'user', 'game'}
    if agg != 'stack':
        assert h['user'].shape == (4, 3)
        assert h['game'].shape == (4, 4)
    else:
        assert h['user'].shape == (4, 1, 3)
        assert h['game'].shape == (4, 1, 4)

    # pair input requires both src and dst type features to be provided
    h = conv(g, ({'user': uf}, {'game' : gf}))
    assert set(h.keys()) == {'game'}
    if agg != 'stack':
        assert h['game'].shape == (4, 4)
    else:
        assert h['game'].shape == (4, 1, 4)

    # test with mod args
    class MyMod(th.nn.Module):
        # Counts how often positional arg1 / keyword-only arg2 are forwarded.
        def __init__(self, s1, s2):
            super(MyMod, self).__init__()
            self.carg1 = 0
            self.carg2 = 0
            self.s1 = s1
            self.s2 = s2
        def forward(self, g, h, arg1=None, *, arg2=None):
            if arg1 is not None:
                self.carg1 += 1
            if arg2 is not None:
                self.carg2 += 1
            return th.zeros((g.number_of_dst_nodes(), self.s2))
    mod1 = MyMod(2, 3)
    mod2 = MyMod(2, 4)
    mod3 = MyMod(3, 4)
    conv = nn.HeteroGraphConv({
        'follows': mod1,
        'plays': mod2,
        'sells': mod3},
        agg)
    conv = conv.to(F.ctx())
    mod_args = {'follows' : (1,), 'plays' : (1,)}
    mod_kwargs = {'sells' : {'arg2' : 'abc'}}
    h = conv(g, {'user' : uf, 'store' : sf}, mod_args=mod_args, mod_kwargs=mod_kwargs)
    # Each relation module sees exactly the extras addressed to its name.
    assert mod1.carg1 == 1
    assert mod1.carg2 == 0
    assert mod2.carg1 == 1
    assert mod2.carg2 == 0
    assert mod3.carg1 == 0
    assert mod3.carg2 == 1
if __name__ == '__main__':
    # NOTE(review): `test_graph_conv` is not visible in this file's head,
    # which defines `test_graph_conv0` — confirm the intended name.
    test_graph_conv()
    test_set2set()
    test_glob_att_pool()
    test_simple_pool()
    test_set_trans()
    test_rgcn()
    test_rgcn_sorted()
    test_tagconv()
    test_gat_conv()
    test_sage_conv()
    test_sgc_conv()
    test_appnp_conv()
    test_gin_conv()
    test_agnn_conv()
    test_gated_graph_conv()
    test_nn_conv()
    test_gmm_conv()
    test_dense_graph_conv()
    test_dense_sage_conv()
    test_dense_cheb_conv()
    test_sequential()
    # NOTE(review): test_atomic_conv and test_cf_conv are pytest-parameterized
    # (they take `g` and `idtype`); calling them bare here will raise unless
    # run via pytest — verify these direct calls are still intended.
    test_atomic_conv()
    test_cf_conv()