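# Tests for torch_sparse.matmul: sparse-dense (spmm) multiplication, plus a
# disabled sparse-sparse (spspmm) variant further below.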
from itertools import product

import pytest
import torch

from torch_sparse.matmul import matmul
from torch_sparse.tensor import SparseTensor
import torch_scatter

from .utils import devices, grad_dtypes

reductions = ['sum', 'mean', 'min', 'max']

# Restrict the parameter grid for quick local debugging; remove these
# overrides to exercise the full device/dtype/reduction matrix.
devices = ['cpu']
grad_dtypes = [torch.float]
reductions = ['sum']


@pytest.mark.parametrize('dtype,device,reduce',
                         product(grad_dtypes, devices, reductions))
def test_spmm(dtype, device, reduce):
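    # Compare sparse-dense matmul against a dense, scatter-based reference,
    # checking the forward result and the gradients w.r.t. `value` and `other`.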
    src = torch.randn((10, 8), dtype=dtype, device=device)
    src[2:4, :] = 0  # Zero out rows 2-3 so the sparse matrix has empty rows.
    src[:, 2:4] = 0  # Zero out columns 2-3 so some columns are empty as well.
    src = SparseTensor.from_dense(src).requires_grad_()
    row, col, value = src.coo()

    other = torch.randn((2, 8, 2), dtype=dtype, device=device,
                        requires_grad=True)

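    # Dense reference: gather the rows of `other` selected by `col`, weight
    # them by the sparse values, and scatter-reduce them back onto `row`.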
    src_col = other.index_select(-2, col) * value.unsqueeze(-1)
    expected = torch_scatter.scatter(src_col, row, dim=-2, reduce=reduce)
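    # scatter() fills groups with no entries with the reduction's identity
    # (+/-finfo.max for min/max); zero those out to match `matmul`, which
    # returns 0 for empty rows.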
    if reduce == 'min':
        expected[expected > 1000] = 0
    if reduce == 'max':
        expected[expected < -1000] = 0

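    # Draw a random upstream gradient and backpropagate it through the
    # reference; stash the resulting gradients, then reset them so the
    # backward pass of `matmul` can be checked independently.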
    grad_out = torch.randn_like(expected)

    expected.backward(grad_out)
    expected_grad_value = value.grad
    value.grad = None
    expected_grad_other = other.grad
    other.grad = None

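    # Run the sparse matmul under test and backpropagate the same gradient.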
    out = matmul(src, other, reduce)
    out.backward(grad_out)

    assert torch.allclose(expected, out)
    assert torch.allclose(expected_grad_value, value.grad)
    assert torch.allclose(expected_grad_other, other.grad)


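# Sparse-sparse matmul test, left disabled in this revision (spspmm support
# is presumably not wired up yet):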
# @pytest.mark.parametrize('dtype,device', product(grad_dtypes, devices))
# def test_spspmm(dtype, device):
#     src = torch.tensor([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=dtype,
#                        device=device)

#     src = SparseTensor.from_dense(src)
#     out = src @ src
#     assert out.size() == (3, 3)
#     assert out.has_value()

#     src.set_value_(None)
#     out = src @ src
#     assert out.size() == (3, 3)
#     assert not out.has_value()