"vscode:/vscode.git/clone" did not exist on "7493573414ad24c2f144e3904cd2792b927ab7e2"
test_spline_conv.py 4.99 KB
Newer Older
rusty1s's avatar
rusty1s committed
1
2
3
4
import pytest
import torch
from torch.autograd import Variable, gradcheck
from torch_spline_conv import spline_conv
from torch_spline_conv.functions.spline_weighting import SplineWeighting
from torch_spline_conv.functions.ffi import implemented_degrees

from .utils import tensors, Tensor


@pytest.mark.parametrize('tensor', tensors)
def test_spline_conv_cpu(tensor):
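    # Star-shaped test graph: node 0 receives one edge from each of the
    # nodes 1-4. Each edge carries a 2-dimensional pseudo-coordinate in
    # [0, 1]; `weight` holds one (in_channels=2, out_channels=1) matrix per
    # kernel point (3 * 4 = 12 of them) and `root_weight` transforms each
    # node's own features.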
    x = Tensor(tensor, [[9, 10], [1, 2], [3, 4], [5, 6], [7, 8]])
    edge_index = torch.LongTensor([[0, 0, 0, 0], [1, 2, 3, 4]])
    pseudo = [[0.25, 0.125], [0.25, 0.375], [0.75, 0.625], [0.75, 0.875]]
    pseudo = Tensor(tensor, pseudo)
    weight = torch.arange(0.5, 0.5 * 25, step=0.5, out=x.new()).view(12, 2, 1)
    kernel_size = torch.LongTensor([3, 4])
    is_open_spline = torch.ByteTensor([1, 0])
    root_weight = torch.arange(12.5, 13.5, step=0.5, out=x.new()).view(2, 1)
    bias = Tensor(tensor, [1])

    output = spline_conv(x, edge_index, pseudo, weight, kernel_size,
                         is_open_spline, 1, root_weight, bias)

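    # Hand-computed message per edge: with the linear (degree 1) basis each
    # pseudo-coordinate lies halfway between two knots, so all four B-spline
    # basis products equal 0.25 and every message is the source features
    # weighted by 0.25 times the selected kernel entries.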
    edgewise_output = [
        1 * 0.25 * (0.5 + 1.5 + 4.5 + 5.5) + 2 * 0.25 * (1 + 2 + 5 + 6),
        3 * 0.25 * (1.5 + 2.5 + 5.5 + 6.5) + 4 * 0.25 * (2 + 3 + 6 + 7),
        5 * 0.25 * (6.5 + 7.5 + 10.5 + 11.5) + 6 * 0.25 * (7 + 8 + 11 + 12),
        7 * 0.25 * (7.5 + 4.5 + 11.5 + 8.5) + 8 * 0.25 * (8 + 5 + 12 + 9),
    ]

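    # Expected outputs, computed by hand as
    # (bias + x_i · root_weight + sum of incoming messages) / (deg_i + 1);
    # nodes 1-4 have no incoming edges.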
    expected_output = [
        (1 + 12.5 * 9 + 13 * 10 + sum(edgewise_output)) / 5,
        1 + 12.5 * 1 + 13 * 2,
        1 + 12.5 * 3 + 13 * 4,
        1 + 12.5 * 5 + 13 * 6,
        1 + 12.5 * 7 + 13 * 8,
    ]

    output = [pytest.approx(o, 0.01) for o in output.view(-1).tolist()]
    assert output == expected_output

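    # Repeat the forward pass with autograd Variables to make sure the
    # differentiable code path yields the same result.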
    x, weight, pseudo = Variable(x), Variable(weight), Variable(pseudo)
    root_weight, bias = Variable(root_weight), Variable(bias)

    output = spline_conv(x, edge_index, pseudo, weight, kernel_size,
                         is_open_spline, 1, root_weight, bias)

    output = [pytest.approx(o, 0.01) for o in output.data.view(-1).tolist()]
    assert output == expected_output


def test_spline_weighting_backward_cpu():
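    # Compare analytical against numerical gradients of the low-level
    # SplineWeighting op in double precision for every implemented
    # B-spline degree.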
    for degree in implemented_degrees.keys():
        kernel_size = torch.LongTensor([5, 5, 5])
        is_open_spline = torch.ByteTensor([1, 0, 1])
        op = SplineWeighting(kernel_size, is_open_spline, degree)

        x = torch.DoubleTensor(16, 2).uniform_(-1, 1)
        x = Variable(x, requires_grad=True)
        pseudo = torch.DoubleTensor(16, 3).uniform_(0, 1)
        pseudo = Variable(pseudo, requires_grad=True)
        weight = torch.DoubleTensor(25, 2, 4).uniform_(-1, 1)
        weight = Variable(weight, requires_grad=True)

        assert gradcheck(op, (x, pseudo, weight), eps=1e-6, atol=1e-4) is True


@pytest.mark.skipif(not torch.cuda.is_available(), reason='no CUDA')
@pytest.mark.parametrize('tensor', tensors)
def test_spline_conv_gpu(tensor):  # pragma: no cover
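    # Same fixture as the CPU test: compute a reference result on the CPU
    # first, then check that the CUDA implementation reproduces it.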
    x = Tensor(tensor, [[9, 10], [1, 2], [3, 4], [5, 6], [7, 8]])
    edge_index = torch.LongTensor([[0, 0, 0, 0], [1, 2, 3, 4]])
    pseudo = [[0.25, 0.125], [0.25, 0.375], [0.75, 0.625], [0.75, 0.875]]
    pseudo = Tensor(tensor, pseudo)
    weight = torch.arange(0.5, 0.5 * 25, step=0.5, out=x.new()).view(12, 2, 1)
    kernel_size = torch.LongTensor([3, 4])
    is_open_spline = torch.ByteTensor([1, 0])
    root_weight = torch.arange(12.5, 13.5, step=0.5, out=x.new()).view(2, 1)
    bias = Tensor(tensor, [1])

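    # Reference output from the CPU implementation.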
    expected_output = spline_conv(x, edge_index, pseudo, weight, kernel_size,
                                  is_open_spline, 1, root_weight, bias)

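    # Move all inputs to the GPU and re-run the forward pass.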
    x, edge_index, pseudo = x.cuda(), edge_index.cuda(), pseudo.cuda()
    weight, kernel_size = weight.cuda(), kernel_size.cuda()
    is_open_spline, root_weight = is_open_spline.cuda(), root_weight.cuda()
    bias = bias.cuda()

    output = spline_conv(x, edge_index, pseudo, weight, kernel_size,
                         is_open_spline, 1, root_weight, bias)
    assert output.cpu().tolist() == expected_output.tolist()

    x, weight, pseudo = Variable(x), Variable(weight), Variable(pseudo)
    root_weight, bias = Variable(root_weight), Variable(bias)

    output = spline_conv(x, edge_index, pseudo, weight, kernel_size,
                         is_open_spline, 1, root_weight, bias)

    assert output.data.cpu().tolist() == expected_output.tolist()


@pytest.mark.skipif(not torch.cuda.is_available(), reason='no CUDA')
def test_spline_weighting_backward_gpu():  # pragma: no cover
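    # GPU counterpart of the CPU gradient check above.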
    for degree in implemented_degrees.keys():
        kernel_size = torch.cuda.LongTensor([5, 5, 5])
        is_open_spline = torch.cuda.ByteTensor([1, 0, 1])
        op = SplineWeighting(kernel_size, is_open_spline, degree)

        x = torch.cuda.DoubleTensor(16, 2).uniform_(-1, 1)
        x = Variable(x, requires_grad=True)
        pseudo = torch.cuda.DoubleTensor(16, 3).uniform_(0, 1)
        pseudo = Variable(pseudo, requires_grad=False)  # TODO: gradient w.r.t. pseudo is not checked on the GPU yet
        weight = torch.cuda.DoubleTensor(25, 2, 4).uniform_(-1, 1)
        weight = Variable(weight, requires_grad=True)

        assert gradcheck(op, (x, pseudo, weight), eps=1e-6, atol=1e-4) is True