"git@developer.sourcefind.cn:OpenDAS/torch-cluster.git" did not exist on "2f1d8920660b96adeca2d1e90221b67eddd7438b"
test_edge_softmax.py 6.27 KB
Newer Older
import math
import unittest

import backend as F

import dgl
import dgl.function as fn
import numpy as np
import pytest
from dgl.ops import edge_softmax

from utils import get_cases, parametrize_idtype

edge_softmax_shapes = [(1,), (1, 3), (3, 4, 5)]
rfuncs = {"sum": fn.sum, "max": fn.max, "min": fn.min, "mean": fn.mean}
fill_value = {"sum": 0, "max": float("-inf")}
feat_size = 2

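
# For reference: edge_softmax(g, e) assigns each edge (u, v) with score e_uv
#     a_uv = exp(e_uv) / sum_{(w, v) in E} exp(e_wv)        [norm_by="dst"]
# i.e. a softmax over every destination node's incoming edges; with
# norm_by="src" the grouping is by source node instead. The tests below
# check this against a dense softmax on cliques and against the equivalent
# computation on a homogeneous version of a heterograph.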
@pytest.mark.parametrize("g", get_cases(["clique"]))
@pytest.mark.parametrize("norm_by", ["src", "dst"])
@pytest.mark.parametrize("shp", edge_softmax_shapes)
@parametrize_idtype
def test_edge_softmax(g, norm_by, shp, idtype):
    g = g.astype(idtype).to(F.ctx())
    edata = F.tensor(np.random.rand(g.num_edges(), *shp))
    e1 = F.attach_grad(F.clone(edata))

    with F.record_grad():
        score1 = edge_softmax(g, e1, norm_by=norm_by)
        F.backward(F.reduce_sum(score1))
        grad_edata = F.grad(e1)

    with F.record_grad():
        e2 = F.attach_grad(F.clone(edata))
        # On a clique the edge list is dense, so the edge scores can be
        # reshaped to (num_src, num_dst, ...) and checked against a plain
        # dense softmax along the matching axis.
        e2_2d = F.reshape(
            e2,
            (g.number_of_src_nodes(), g.number_of_dst_nodes(), *e2.shape[1:]),
        )
        if norm_by == "src":
            # normalize over edges sharing a source, i.e. along the dst axis
            score2 = F.softmax(e2_2d, 1)
        else:  # norm_by == "dst": normalize over each node's incoming edges
            score2 = F.softmax(e2_2d, 0)
        score2 = F.reshape(score2, (-1, *e2.shape[1:]))
        assert F.allclose(score1, score2)
        print("forward passed")

        F.backward(F.reduce_sum(score2))
        assert F.allclose(F.grad(e2), grad_edata)
        print("backward passed")


def create_test_heterograph(idtype):
    # test heterograph from the docstring, plus a user -- wishes -- game relation
    # 3 users, 2 games, 2 developers
    # metagraph:
    #    ('user', 'follows', 'user')
    #    ('user', 'plays', 'game')
    #    ('user', 'wishes', 'game')
    #    ('developer', 'develops', 'game')

    g = dgl.heterograph(
        {
            ("user", "follows", "user"): ([0, 1, 2, 1, 1], [0, 0, 1, 1, 2]),
            ("user", "plays", "game"): ([0, 1, 2, 1], [0, 0, 1, 1]),
            ("user", "wishes", "game"): ([0, 1, 1], [0, 0, 1]),
            ("developer", "develops", "game"): ([0, 1, 0], [0, 1, 1]),
        },
        idtype=idtype,
        device=F.ctx(),
    )
    assert g.idtype == idtype
    assert g.device == F.ctx()
    return g
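

# A minimal usage sketch on this fixture (illustrative only): edge_softmax
# also accepts a single relation slice, normalizing each score over the
# incoming edges of its destination node by default (norm_by="dst"):
#
#     g = create_test_heterograph(F.int64)
#     w = F.randn((g.num_edges("plays"), 1))
#     a = edge_softmax(g["plays"], w)  # per-'game' softmax over in-edges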


@unittest.skipIf(
    dgl.backend.backend_name != "pytorch", reason="Only support PyTorch for now"
)
def test_edge_softmax_unidirectional():
    g = dgl.heterograph(
        {
            ("A", "AB", "B"): (
                [1, 2, 3, 1, 2, 3, 1, 2, 3],
                [0, 0, 0, 1, 1, 1, 2, 2, 2],
            ),
            ("B", "BB", "B"): (
                [0, 1, 2, 0, 1, 2, 0, 1, 2],
                [0, 0, 0, 1, 1, 1, 2, 2, 2],
            ),
        }
    )
    g = g.to(F.ctx())
    g.edges["AB"].data["x"] = F.ones(9) * 2
    g.edges["BB"].data["x"] = F.ones(9)
    result = dgl.ops.edge_softmax(
        g, {"AB": g.edges["AB"].data["x"], "BB": g.edges["BB"].data["x"]}
    )

    ab = result["A", "AB", "B"]
    bb = result["B", "BB", "B"]
    # Each B node receives three AB edges with logit 2 and three BB edges
    # with logit 1, so the shared softmax denominator per destination is
    # 3 * (exp(2) + exp(1)).
    e2 = F.zeros_like(ab) + math.exp(2) / ((math.exp(2) + math.exp(1)) * 3)
    e1 = F.zeros_like(bb) + math.exp(1) / ((math.exp(2) + math.exp(1)) * 3)
    assert F.allclose(ab, e2)
    assert F.allclose(bb, e1)


@unittest.skipIf(
    dgl.backend.backend_name != "pytorch", reason="Only support PyTorch for now"
)
@pytest.mark.parametrize("g", get_cases(["clique"]))
@pytest.mark.parametrize("norm_by", ["src", "dst"])
# @pytest.mark.parametrize('shp', edge_softmax_shapes)
@parametrize_idtype
def test_edge_softmax_heterograph(norm_by, idtype):
    print("params", norm_by, idtype)

    g = create_test_heterograph(idtype)

    x1 = F.randn((g.num_edges("plays"), feat_size))
    x2 = F.randn((g.num_edges("follows"), feat_size))
    x3 = F.randn((g.num_edges("develops"), feat_size))
    x4 = F.randn((g.num_edges("wishes"), feat_size))

    g["plays"].edata["eid"] = x1
    g["follows"].edata["eid"] = x2
    g["develops"].edata["eid"] = x3
    g["wishes"].edata["eid"] = x4

    #################################################################
    #  edge_softmax() on homogeneous graph
    #################################################################

    with F.record_grad():
        hm_g = dgl.to_homogeneous(g)
        # to_homogeneous lays edges out in g.canonical_etypes order
        # (develops, follows, plays, wishes here), hence this concat order.
        hm_x = F.cat((x3, x2, x1, x4), 0)
        hm_e = F.attach_grad(F.clone(hm_x))
        score_hm = edge_softmax(hm_g, hm_e, norm_by=norm_by)
        hm_g.edata["score"] = score_hm
        ht_g = dgl.to_heterogeneous(hm_g, g.ntypes, g.etypes)
        r1 = ht_g.edata["score"][("user", "plays", "game")]
        r2 = ht_g.edata["score"][("user", "follows", "user")]
        r3 = ht_g.edata["score"][("developer", "develops", "game")]
        r4 = ht_g.edata["score"][("user", "wishes", "game")]
        F.backward(F.reduce_sum(r1) + F.reduce_sum(r2))
        grad_edata_hm = F.grad(hm_e)

    #################################################################
    #  edge_softmax() on heterogeneous graph
    #################################################################

    e1 = F.attach_grad(F.clone(x1))
    e2 = F.attach_grad(F.clone(x2))
    e3 = F.attach_grad(F.clone(x3))
    e4 = F.attach_grad(F.clone(x4))
    e = {
        ("user", "follows", "user"): e2,
        ("user", "plays", "game"): e1,
        ("user", "wishes", "game"): e4,
        ("developer", "develops", "game"): e3,
    }
    with F.record_grad():
        score = edge_softmax(g, e, norm_by=norm_by)
        r5 = score[("user", "plays", "game")]
        r6 = score[("user", "follows", "user")]
        r7 = score[("developer", "develops", "game")]
        r8 = score[("user", "wishes", "game")]
        F.backward(F.reduce_sum(r5) + F.reduce_sum(r6))
        grad_edata_ht = F.cat(
            (F.grad(e3), F.grad(e2), F.grad(e1), F.grad(e4)), 0
        )
        # correctness check
        assert F.allclose(r1, r5)
        assert F.allclose(r2, r6)
        assert F.allclose(r3, r7)
        assert F.allclose(r4, r8)
        assert F.allclose(grad_edata_hm, grad_edata_ht)
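

# A follow-on property one could verify (a sketch under assumptions, not
# executed by the tests above): with norm_by="dst", dict-input edge_softmax
# normalizes jointly across relations sharing a destination node type, so
# each 'game' node's incoming 'plays'/'wishes'/'develops' scores sum to one.
# `_check_dst_normalization` is a hypothetical helper, not a DGL API.
def _check_dst_normalization(g, score):
    total = np.zeros((g.num_nodes("game"), feat_size))
    for rel in ["plays", "wishes", "develops"]:
        _, dst = g.edges(etype=rel)  # destination node ids of this relation
        np.add.at(
            total, F.asnumpy(dst), F.asnumpy(score[g.to_canonical_etype(rel)])
        )
    assert np.allclose(total, 1.0)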


if __name__ == "__main__":
    # Only the non-parametrized test can be invoked directly; run the
    # parametrized tests above through pytest.
    test_edge_softmax_unidirectional()